knitr::opts_chunk$set(echo = TRUE, message = FALSE, warning = FALSE)
options(max.print = 999999) # we will have large outputs
library(knitr)
library(rmarkdown)
library(tidyverse) #for data cleaning and visualization
library(psych) # does factor analysis
library(GPArotation) #for additional matrix roration; but may not be necesary for the current aanalysis
library(nFactors) # find n of factors
library(MVN) # Multivariate normality test
library(mvoutlier) #multivariate outliers
library(lme4)
library(lmerTest)
library(emmeans)
library(MuMIn)
#emm_options(lmerTest.limit = 27378)
# Load the raw corpus of linguistic indices.
btr_data <- here::here("dataset/xml_results_BTR58_2021-10-11.csv") %>%
  read_csv()
# Data cleaning: keep the six metadata columns plus every linguistic index
# that is non-zero for at least one text.
btr_indices <- btr_data[, 7:dim(btr_data)[2]]
btr_selected_var <- as.data.frame(cbind(btr_data[, 1:6],
                                        btr_indices[, colSums(btr_indices != 0) > 0]))
# Drop very short texts (cutoff: more than 50 words).
btr_filtered <- btr_selected_var %>%
  filter(nwords > 50)
# Move nwords and wrd_length right after the metadata columns
# (nwords itself is not entered into the EFA).
btr_filtered <- as.data.frame(cbind(btr_filtered[, 1:6],
                                    btr_filtered[, c("nwords", "wrd_length")],
                                    btr_filtered[, 9:dim(btr_filtered)[2]]))
# NOTE(review): attach() is generally discouraged, but later calls refer to
# metadata columns (learning_environment, discipline, ...) unqualified, so it
# is kept here.
attach(btr_filtered)
# Variables deleted based on their distributions.
verbs_delete <- c("modal_necessity", "intransitive_activity_phrasal_verb", "intransitive_occurence_phrasal_verb", "intransitive_aspectual_phrasal_verb", "copular_phrasal_verb", "transitive_activity_phrasal_verb", "transitive_mental_phrasal_verb", "transitive_communication_phrasal_verb")
adj_delete <- c("size_attributive_adj", "time_attributive_adj", "color_attributive_adj", "evaluative_attributive_adj", "relational_attributive_adj", "topical__attributive_adj", "attitudinal_adj", "likelihood_adj", "certainty_adj", "ability_willingness_adj", "personal_affect_adj", "ease_difficulty_adj")
adv_delete <- c("attitudinal_adverb", "likelihood_adverb", "nonfactive_adverb", "hedges_adverb")
others_delete <- c("wh_relative_obj_clause", "that_verb_clause_nonfactive", "that_verb_clause_attitudinal", "that_verb_clause_likelihood", "that_adjective_clause", "that_adjective_clause_attitudinal", "that_adjective_clause_likelihood", "that_noun_clause_attitudinal", "that_noun_clause_factive", "that_noun_clause_likelihood", "that_noun_clause_nonfactive", "to_clause_verb_to_speech_act", "to_clause_verb_probability", "to_clause_verb_cognition", "to_clause_adjective_certainty", "to_clause_adjective_ability_willingness", "to_clause_adjective_certainty", "to_clause_adjective_ease_difficulty", "to_clause_adjective_evaluative", "to_clause_adjective_personal_affect")
delete_ <- c("infinitive_prop", "pp_all")
# Remove all flagged variables in one step (order of removal is irrelevant).
drop_vars <- c(verbs_delete, adj_delete, adv_delete, others_delete, delete_)
btr_filtered <- btr_filtered[, !(names(btr_filtered) %in% drop_vars)]
head(btr_filtered)
# Count the number of columns in the cleaned data set.
n_col <- dim(btr_filtered)[2]
# Categorization of courses as MOOC vs non-MOOC.
MOOC_NonMOOC_category <- here::here("dataset/BTR58_NewCategory_2021-06-29.csv") %>%
  read_csv()
## These are in-house function for Multidimensional analysis
#1) *MD.reduce* takes Factor analysis results (fit object) and reduce the linguistic indices according to miminum factor loadings and communalities (see Biber, 1988, 2006).
#' Reduce linguistic indices after factor analysis (Biber, 1988, 2006).
#'
#' For every index (row of the pattern matrix), the index is assigned to the
#' factor on which it has its highest absolute loading, provided that loading
#' meets `cutoff_loading` AND the index's communality meets
#' `cut_off_communality`. Indices failing either criterion keep NA in the
#' factor_loaded/loading columns; the communality is always recorded.
#'
#' fa_model: a psych::fa result (needs $loadings and $communality).
#' cutoff_loading: minimum absolute primary loading (e.g. .30).
#' cut_off_communality: minimum communality (e.g. .15; Biber, 2006).
#' Returns a data frame (rownames = index names) with columns
#' factor_loaded, loading, communality.
MD.reduce <- function(fa_model, cutoff_loading, cut_off_communality) {
  pattern_mat <- as.data.frame.matrix(fa_model$loadings)
  comu_vec <- as.data.frame(fa_model$communality)
  n_items <- nrow(pattern_mat)
  # Result holder: communality is filled for every item up front; the
  # factor assignment and loading stay NA unless the item is retained.
  Var_retained <- data.frame(
    factor_loaded = rep(NA_character_, n_items),
    loading = rep(NA_real_, n_items),
    communality = comu_vec[, 1],
    row.names = rownames(pattern_mat),
    stringsAsFactors = FALSE
  )
  for (i in seq_len(n_items)) {
    # Communality criterion first (e.g. above .15; Biber, 2006).
    if (abs(comu_vec[i, 1]) >= cut_off_communality) {
      abs_loadings <- abs(as.numeric(pattern_mat[i, ]))
      f <- which.max(abs_loadings) # primary factor = largest |loading|
      if (abs_loadings[f] >= cutoff_loading) {
        Var_retained[i, "factor_loaded"] <- names(pattern_mat)[f]
        Var_retained[i, "loading"] <- pattern_mat[i, f]
      }
    }
  }
  return(Var_retained)
}
# *MD.scores* calculates Factor scores as described in Biber (2006).
#' Compute dimension (factor) scores as described in Biber (2006).
#'
#' For each factor, the score of a text is the sum of z-scores of the indices
#' loading positively on that factor minus the sum of z-scores of the indices
#' loading negatively. (The original code placed a zero loading in BOTH
#' groups so it cancelled out; here zero loadings are simply excluded, which
#' yields the same net result.)
#'
#' fa_model: a psych::fa result (needs $loadings for factor names/count).
#' reduced_item_list: output of MD.reduce (rownames = index names).
#' data: data frame whose first 6 columns are metadata, rest are indices.
#' Returns a data frame of the 6 metadata columns plus one score per factor.
MD.scores <- function(fa_model, reduced_item_list, data) {
  factor_names <- colnames(fa_model$loadings)
  n_texts <- nrow(data)
  score_mat <- matrix(NA_real_, nrow = n_texts, ncol = length(factor_names),
                      dimnames = list(NULL, factor_names))
  # Helper: row sums of the standardized selected columns;
  # contributes 0 when no variable falls in the group.
  sum_z <- function(var_names) {
    sel <- data[, names(data) %in% var_names, drop = FALSE]
    if (ncol(sel) == 0) {
      return(rep(0, n_texts))
    }
    rowSums(scale(as.matrix(sel)))
  }
  for (f in seq_along(factor_names)) {
    on_factor <- reduced_item_list$factor_loaded == factor_names[f]
    pos_var <- rownames(reduced_item_list)[which(on_factor & reduced_item_list$loading > 0)]
    neg_var <- rownames(reduced_item_list)[which(on_factor & reduced_item_list$loading < 0)]
    score_mat[, f] <- sum_z(pos_var) - sum_z(neg_var)
  }
  # Combine the metadata columns with the dimension scores.
  factor_score <- as.data.frame(cbind(data[, 1:6], score_mat))
  return(factor_score)
}
# *MD.visualize* plots the scores based on MD.scores and print a list of variables relevant to the factor alongside.
# Plot dimension scores per factor and print the indices defining each factor.
#   fa_model     : psych::fa result (used for $factors and loading names)
#   reduced_data : output of MD.reduce
#   dataset      : data frame passed on to MD.scores
#   x, color     : grouping columns, passed as bare column values (resolved via
#                  the earlier attach(btr_filtered))
#   wrap, grid   : optional faceting formulas for facet_wrap/facet_grid
#   legend       : if FALSE, suppress the legend
# NOTE(review): the xorder, xlab and ylab arguments appear to be inoperative --
# xorder is overwritten below before it is read, and the xlab/ylab branches
# are empty placeholders. Confirm intent before relying on them.
MD.visualize <- function(fa_model, reduced_data, dataset, x, xorder = F, color, wrap =F, grid =F, xlab =F, ylab = F, legend = T) {
n_factor <- fa_model$factors
factor_name <- sort(colnames(fa_model$loadings))
# Dimension scores for every text (metadata + one column per factor).
score <- MD.scores(fa_model, reduced_data, dataset)
# NOTE(review): this discards the xorder argument supplied by the caller
# (e.g. "nlengths") and replaces it with the lower-cased x column.
xorder = tolower(x)
if (xorder == 'nlengths'){
xorder = reorder(unique(x), nchar(unique(x)))
xorder
}
for (f in 1:n_factor) {
# Table of the indices loading on this factor.
print(paged_table(reduced_data[which(reduced_data$factor_loaded == factor_name[f]),]))
# Empty placeholder branches for custom axis labels (currently no-ops).
if (xlab !=F){
}
if (ylab !=F){
}
# Jitter + violin + boxplot of this dimension's scores, grouped by x.
plot <- ggplot(score, aes(x = reorder(tolower(x), xorder), y = get(factor_name[f]), color = color)) +
geom_jitter(alpha = .2) +
geom_violin() +
geom_boxplot(width = .3, outlier.alpha = 0) +
theme_bw() +
theme(axis.text.x = element_text(angle = 60, hjust = 1, size = 7)) +
labs(y = paste("Dimension", f, sep = " "), x = paste(toupper(substring(deparse(substitute(x)), 1,1)), substring(deparse(substitute(x)), 2), sep="", collapse=" ") , color = deparse(substitute(color)) ) +
coord_cartesian(clip="off")
if (wrap != FALSE) {
plot <- plot + facet_wrap(wrap, scales = "free_x")
}
if (grid != FALSE) {
plot <- plot + facet_grid(grid, scales = "free_x")
}
if (legend == F) {
plot <- plot + theme(legend.position = "none")
}
print(plot)
#ggsave(paste(as.character(f), '.jpeg', sep = ''), device = 'jpeg', dpi = 400)
cat("\r\n\r\n")
}
}
# Quick overview of the raw corpus before cleaning.
head(btr_data)
# Number of texts per discipline (recorded output below).
xtabs(~btr_data$discipline)
## btr_data$discipline
## business education engineering humanities
## 1271 531 774 674
## natural_science other service_encounters social_science
## 627 30 22 858
# Number of texts per text type (recorded output below).
xtabs(~btr_data$text_type)
## btr_data$text_type
## announcements_discussions assignment_description
## 403 390
## classroom_management_talk course_management
## 65 21
## course_packs instructional_reading
## 27 663
## instructional_video lab
## 2217 17
## lecture office_hours
## 177 11
## other_institutional_writing quiz
## 37 306
## service_encounter slides
## 22 140
## study_group syllabus
## 25 179
## textbooks
## 87
# Distribution of short texts (< 200 words) -- presumably used to choose the
# minimum-length cutoff applied earlier; confirm with authors.
ggplot(btr_data[which(btr_data$nwords < 200),], aes(x = nwords)) +
geom_histogram()
The final data set is now ready to be analyzed.
paged_table(btr_filtered)
# Number of linguistic indices entering the analysis.
n_ind <- dim(btr_filtered[, 8:n_col])[2]
library(Routliers)
library(grid)
# One density histogram per index, annotated with MAD-based outlier bounds
# (orange; see Kline, 2015, p. 72 for the formula) and mean +/- 3 SD bounds
# (red) for comparison.
for (idx in seq_len(n_ind)) {
  vals <- btr_filtered[, idx + 7]
  ind_name <- colnames(btr_filtered)[7 + idx]
  # Freedman-Diaconis bin width for this index.
  breaks <- pretty(range(vals), n = nclass.FD(vals), min.n = 1)
  binwidth <- breaks[2] - breaks[1]
  # MAD-based outlier boundaries.
  out <- Routliers::outliers_mad(vals, b = 1.483, threshold = 2.24)
  p <- ggplot(btr_filtered[, 8:n_col], aes(x = vals, y = ..density..)) +
    geom_histogram(binwidth = binwidth) +
    geom_density(color = "blue") +
    geom_vline(xintercept = out$UL_CI_MAD, color = "orange", linetype = "longdash") +
    geom_vline(xintercept = out$LL_CI_MAD, color = "orange", linetype = "longdash") +
    geom_vline(xintercept = mean(vals) + 3 * sd(vals), color = "red", linetype = 'dashed') +
    geom_vline(xintercept = mean(vals) - 3 * sd(vals), color = "red", linetype = 'dashed') +
    labs(title = ind_name, x = ind_name)
  print(p)
  cat("\r\n\r\n")
}
# Sampling-adequacy and sphericity checks prior to factor analysis.
head(btr_filtered[8:n_col])
# Kaiser-Meyer-Olkin measure: overall MSA and per-item MSAi.
KMO_btr <- KMO(btr_filtered[8:n_col])
KMO_btr$MSA
## [1] 0.816818
paged_table(as.data.frame(KMO_btr$MSAi))
# NOTE(review): bartlett.test() performs Bartlett's test of HOMOGENEITY OF
# VARIANCES (as the recorded output below shows), not Bartlett's test of
# sphericity (psych::cortest.bartlett). Confirm which test was intended.
bartlett.test(btr_filtered[8:n_col])
##
## Bartlett test of homogeneity of variances
##
## data: btr_filtered[8:n_col]
## Bartlett's K-squared = 1470539, df = 85, p-value < 2.2e-16
This quantity is related to the degrees of freedom in the SEM approach; it is shown just to illustrate the size of the correlation matrix.
# Number of unique elements in the indicator correlation matrix: p(p + 1) / 2.
n_indicator = dim(btr_filtered[8:n_col])[2]
(n_indicator * (n_indicator + 1))/ 2
## [1] 3741
# Mardia's test of multivariate normality (skewness/kurtosis).
normal <- psych::mardia(btr_filtered[8:n_col],na.rm = TRUE,plot=TRUE)
# Multivariate outlier detection via Mahalanobis distance.
outlier <- psych::outlier(btr_filtered[8:n_col], plot = TRUE, bad = 10, na.rm = TRUE) #needs to check the cut-off value for this
# Parallel analysis (principal axis, 100 iterations) to suggest the number of
# factors. Note: results depend on the RNG state.
screeplot <- fa.parallel(btr_filtered[8:n_col], fa = "fa", fm = "pa", n.iter=100, error.bars = T, sim = FALSE,quant=.95)
## Parallel analysis suggests that the number of factors = 24 and the number of components = NA
#factor method to use. (minres, ml, uls, wls, gls, pa)
# Six-factor EFA: principal axis extraction (fm = "pa") with Promax rotation;
# missing values imputed by the median.
PA6 <- fa(btr_filtered[8:n_col],nfactors= 6, n.iter=1, rotate="Promax",
residuals=TRUE, SMC=TRUE, missing=TRUE,impute="median",
min.err = 0.001, max.iter = 50, symmetric=TRUE, warnings=TRUE, fm="pa",
alpha=.05, p=.05, oblique.scores=TRUE)
# Sort items by loading size for readability.
PA6 <- fa.sort(PA6, polar=FALSE)
# Loadings plus communality, uniqueness and complexity, for export.
PA6loading <- as.data.frame(cbind(PA6$loadings, PA6$communality, PA6$uniquenesses,PA6$complexity))
#write.csv(PA6loading, "MD_result/PA6Loading_forETSreport.csv")
print(PA6, digit = 3)
## Factor Analysis using method = pa
## Call: fa(r = btr_filtered[8:n_col], nfactors = 6, n.iter = 1, rotate = "Promax",
## residuals = TRUE, SMC = TRUE, missing = TRUE, impute = "median",
## min.err = 0.001, max.iter = 50, symmetric = TRUE, warnings = TRUE,
## fm = "pa", alpha = 0.05, p = 0.05, oblique.scores = TRUE)
## Standardized loadings (pattern matrix) based upon correlation matrix
## PA1 PA3 PA4 PA2 PA6 PA5
## nonfinite_prop -0.813 0.221 -0.070 0.072 0.029 -0.185
## cc_clause 0.774 -0.016 -0.081 -0.112 -0.005 -0.031
## contraction 0.754 0.049 -0.210 -0.228 -0.065 -0.050
## emphatics 0.750 0.003 -0.097 -0.111 -0.023 -0.108
## be_mv 0.732 -0.055 0.029 -0.057 -0.024 0.022
## pp_demonstrative 0.634 0.019 0.108 -0.198 0.000 -0.125
## nn_all -0.619 -0.341 -0.105 0.099 -0.087 -0.042
## pp1 0.613 0.151 -0.028 -0.074 -0.119 -0.035
## pp3_it 0.585 -0.006 -0.077 -0.186 -0.011 0.114
## mean_verbal_deps -0.568 0.264 -0.093 -0.056 0.017 -0.156
## factive_adverb 0.557 0.072 -0.104 0.067 0.048 0.016
## mlc -0.484 -0.177 -0.180 -0.032 -0.035 -0.199
## nn_abstract -0.442 0.077 0.026 0.307 -0.049 -0.190
## mltu -0.433 0.031 -0.073 0.048 0.152 -0.244
## mattr -0.411 0.133 0.000 0.284 0.046 0.249
## that_relative_clause 0.348 0.115 -0.120 0.234 0.339 -0.201
## det_nominal 0.341 0.009 0.205 -0.238 0.175 -0.020
## adverbial_subordinator_causitive 0.308 -0.024 0.073 -0.069 0.043 0.123
## pp_indefinite 0.292 0.098 -0.107 -0.135 0.046 0.129
## complementizer_that0 0.287 0.114 0.004 -0.009 -0.131 0.166
## jj_predicative 0.278 0.152 0.113 0.087 -0.075 0.053
## past_participial_clause -0.272 -0.105 0.082 0.055 0.079 -0.064
## amplifiers_adverb 0.272 -0.017 -0.066 0.087 0.020 0.081
## discourse_particle 0.251 0.026 0.228 -0.189 -0.006 -0.050
## agentless_passive -0.249 -0.079 0.248 0.038 0.086 0.047
## pv_do 0.200 0.150 0.017 -0.037 -0.023 0.027
## conjuncts_adverb -0.144 -0.062 0.136 -0.118 0.072 -0.041
## downtoners_adverb 0.124 -0.056 0.035 -0.027 0.070 -0.002
## all_phrasal_verbs 0.123 0.039 -0.031 -0.104 -0.008 0.111
## to_clause -0.174 0.803 0.021 0.053 0.075 0.011
## verb 0.183 0.695 0.173 -0.121 0.056 0.099
## non_past_tense 0.316 0.665 0.036 -0.162 -0.022 -0.216
## to_clause_verb 0.205 0.515 -0.015 -0.065 0.048 -0.050
## pp2 -0.102 0.514 -0.208 -0.171 -0.039 -0.116
## mental_verb 0.148 0.505 0.120 0.076 -0.105 0.068
## to_clause_verb_desire 0.095 0.447 -0.009 -0.023 -0.013 -0.078
## to_clause_noun -0.388 0.439 0.049 0.095 0.072 0.067
## dc_c 0.102 0.413 0.240 0.076 0.400 -0.106
## activity_verb -0.080 0.391 -0.105 -0.360 0.101 0.054
## poss_nominal -0.089 0.372 -0.250 0.015 -0.031 0.290
## mean_nominal_deps 0.017 -0.299 -0.165 0.088 0.135 -0.190
## modal_possibility 0.041 0.292 0.166 0.028 0.063 -0.112
## to_clause_verb_to_causative -0.002 0.279 -0.083 0.012 0.081 0.113
## aspectual_verb -0.159 0.267 -0.016 -0.042 0.032 0.002
## wh_clause 0.098 0.259 0.106 0.088 -0.065 -0.056
## communication_verb 0.018 0.246 0.069 -0.022 -0.009 0.170
## to_clause_adjective -0.111 0.235 0.028 0.082 0.012 0.070
## split_aux 0.143 0.192 0.035 0.011 0.042 -0.036
## causation_verb -0.053 0.192 0.089 0.087 -0.039 -0.127
## modal_predictive -0.187 0.189 0.035 -0.013 -0.030 -0.084
## by_passive -0.054 -0.175 0.078 0.065 0.102 0.070
## adverbial_subordinator_other -0.018 0.168 0.158 0.010 0.043 0.080
## that_verb_clause -0.033 0.069 0.886 -0.039 -0.120 -0.017
## that_complement_clause 0.087 0.083 0.839 0.002 -0.082 0.005
## that_verb_clause_factive -0.141 0.069 0.787 -0.117 -0.103 -0.078
## ccomp_c 0.160 0.276 0.469 0.081 -0.178 -0.041
## existence_verb -0.043 0.040 0.234 0.104 0.027 -0.075
## that_noun_clause 0.084 0.040 0.213 0.090 0.078 0.035
## wh_question -0.018 0.012 -0.168 -0.052 -0.115 -0.011
## wrd_length -0.551 -0.019 -0.003 0.588 -0.026 0.024
## cc_phrase -0.217 0.100 -0.108 0.561 -0.073 -0.056
## cc_nominal 0.033 0.052 -0.148 0.526 -0.065 -0.115
## nominalization -0.375 -0.008 0.080 0.498 -0.097 -0.136
## jj_attributive -0.167 -0.239 0.035 0.441 0.049 -0.076
## amod_nominal 0.104 -0.142 0.021 0.373 0.082 -0.008
## prep_phrase -0.213 -0.218 0.169 0.354 0.146 0.070
## adverbial_subordinator_conditional -0.006 0.174 0.044 -0.330 -0.004 -0.112
## prep_nominal 0.055 -0.291 0.116 0.301 0.101 0.017
## time_adverbials 0.225 0.014 0.142 -0.277 0.032 -0.090
## place_adverbials 0.150 0.022 0.005 -0.271 0.000 -0.119
## nn_group -0.030 -0.035 -0.103 0.242 0.004 0.176
## nn_cognitive -0.187 0.069 -0.055 0.242 -0.031 -0.061
## nn_concrete -0.195 -0.053 -0.046 -0.202 0.029 -0.090
## nn_place -0.038 -0.136 -0.058 0.191 -0.058 0.065
## nn_quantity -0.046 -0.109 -0.004 -0.150 -0.014 -0.121
## occurrence_verb 0.085 -0.060 0.048 0.127 0.015 0.040
## wh_relative_clause -0.151 -0.067 0.003 -0.208 0.824 0.435
## relcl_c 0.038 0.134 -0.244 0.142 0.807 -0.111
## wh_relative_subj_clause -0.123 -0.059 -0.008 -0.181 0.708 0.441
## relcl_nominal 0.460 0.307 -0.153 0.032 0.596 -0.023
## wh_relative_prep_clause -0.063 -0.032 0.030 -0.007 0.301 0.032
## pp3 0.140 0.038 -0.050 0.028 0.077 0.611
## past_tense 0.184 -0.191 0.036 -0.093 0.061 0.577
## nn_animate -0.094 0.117 -0.145 0.239 0.022 0.439
## perfect_aspect 0.068 0.062 0.119 0.042 0.043 0.246
## nn_technical -0.184 -0.161 0.063 -0.128 -0.002 -0.189
## h2 u2 com
## nonfinite_prop 0.7254 0.275 1.29
## cc_clause 0.5873 0.413 1.07
## contraction 0.6407 0.359 1.38
## emphatics 0.5428 0.457 1.12
## be_mv 0.5486 0.451 1.03
## pp_demonstrative 0.5565 0.443 1.35
## nn_all 0.8163 0.184 1.74
## pp1 0.4810 0.519 1.25
## pp3_it 0.4259 0.574 1.32
## mean_verbal_deps 0.3700 0.630 1.68
## factive_adverb 0.3074 0.693 1.15
## mlc 0.5394 0.461 1.97
## nn_abstract 0.3661 0.634 2.31
## mltu 0.3314 0.669 1.98
## mattr 0.2991 0.701 2.80
## that_relative_clause 0.3277 0.672 3.91
## det_nominal 0.3560 0.644 3.11
## adverbial_subordinator_causitive 0.1633 0.837 1.62
## pp_indefinite 0.1686 0.831 2.53
## complementizer_that0 0.1889 0.811 2.45
## jj_predicative 0.1720 0.828 2.47
## past_participial_clause 0.1258 0.874 1.93
## amplifiers_adverb 0.0764 0.924 1.56
## discourse_particle 0.2241 0.776 2.97
## agentless_passive 0.1133 0.887 2.56
## pv_do 0.0998 0.900 2.02
## conjuncts_adverb 0.0460 0.954 4.03
## downtoners_adverb 0.0273 0.973 2.38
## all_phrasal_verbs 0.0529 0.947 3.33
## to_clause 0.5552 0.445 1.12
## verb 0.7950 0.205 1.40
## non_past_tense 0.7967 0.203 1.82
## to_clause_verb 0.3993 0.601 1.39
## pp2 0.3540 0.646 1.80
## mental_verb 0.3849 0.615 1.49
## to_clause_verb_desire 0.2409 0.759 1.16
## to_clause_noun 0.2124 0.788 2.22
## dc_c 0.5075 0.492 2.97
## activity_verb 0.2961 0.704 2.42
## poss_nominal 0.2741 0.726 2.87
## mean_nominal_deps 0.2192 0.781 3.07
## modal_possibility 0.1417 0.858 2.11
## to_clause_verb_to_causative 0.0957 0.904 1.72
## aspectual_verb 0.0665 0.933 1.73
## wh_clause 0.1070 0.893 2.22
## communication_verb 0.1212 0.879 2.00
## to_clause_adjective 0.0569 0.943 1.96
## split_aux 0.0832 0.917 2.13
## causation_verb 0.0480 0.952 3.04
## modal_predictive 0.0525 0.947 2.52
## by_passive 0.0720 0.928 3.07
## adverbial_subordinator_other 0.0707 0.929 2.62
## that_verb_clause 0.7373 0.263 1.06
## that_complement_clause 0.7638 0.236 1.06
## that_verb_clause_factive 0.5464 0.454 1.18
## ccomp_c 0.3955 0.604 2.34
## existence_verb 0.0660 0.934 1.80
## that_noun_clause 0.1001 0.900 2.18
## wh_question 0.0616 0.938 2.04
## wrd_length 0.7977 0.202 2.00
## cc_phrase 0.4138 0.586 1.51
## cc_nominal 0.2809 0.719 1.32
## nominalization 0.4664 0.534 2.19
## jj_attributive 0.4063 0.594 1.99
## amod_nominal 0.1940 0.806 1.58
## prep_phrase 0.3670 0.633 3.50
## adverbial_subordinator_conditional 0.1789 0.821 1.82
## prep_nominal 0.2453 0.755 2.61
## time_adverbials 0.2086 0.791 2.75
## place_adverbials 0.1303 0.870 2.01
## nn_group 0.1128 0.887 2.32
## nn_cognitive 0.1179 0.882 2.39
## nn_concrete 0.0932 0.907 2.69
## nn_place 0.0828 0.917 2.64
## nn_quantity 0.0544 0.946 3.04
## occurrence_verb 0.0317 0.968 2.90
## wh_relative_clause 0.7104 0.290 1.77
## relcl_c 0.6962 0.304 1.36
## wh_relative_subj_clause 0.5628 0.437 1.92
## relcl_nominal 0.7155 0.285 2.61
## wh_relative_prep_clause 0.0953 0.905 1.16
## pp3 0.4315 0.569 1.17
## past_tense 0.4043 0.596 1.53
## nn_animate 0.2879 0.712 2.10
## perfect_aspect 0.1182 0.882 1.93
## nn_technical 0.1323 0.868 3.96
##
## PA1 PA3 PA4 PA2 PA6 PA5
## SS loadings 9.214 5.236 3.365 3.760 2.747 2.316
## Proportion Var 0.107 0.061 0.039 0.044 0.032 0.027
## Cumulative Var 0.107 0.168 0.207 0.251 0.283 0.310
## Proportion Explained 0.346 0.197 0.126 0.141 0.103 0.087
## Cumulative Proportion 0.346 0.542 0.669 0.810 0.913 1.000
##
## With factor correlations of
## PA1 PA3 PA4 PA2 PA6 PA5
## PA1 1.000 0.382 0.398 -0.217 0.134 0.210
## PA3 0.382 1.000 0.106 -0.214 -0.081 0.136
## PA4 0.398 0.106 1.000 -0.042 0.289 0.128
## PA2 -0.217 -0.214 -0.042 1.000 0.274 0.071
## PA6 0.134 -0.081 0.289 0.274 1.000 -0.070
## PA5 0.210 0.136 0.128 0.071 -0.070 1.000
##
## Mean item complexity = 2.1
## Test of the hypothesis that 6 factors are sufficient.
##
## The degrees of freedom for the null model are 3655 and the objective function was 47.619 with Chi Square of 215834.4
## The degrees of freedom for the model are 3154 and the objective function was 21.914
##
## The root mean square of the residuals (RMSR) is 0.043
## The df corrected root mean square of the residuals is 0.047
##
## The harmonic number of observations is 4563 with the empirical chi square 62578.41 with prob < 0
## The total number of observations was 4563 with Likelihood Chi Square = 99235.98 with prob < 0
##
## Tucker Lewis Index of factoring reliability = 0.4748
## RMSEA index = 0.0817 and the 95 % confidence intervals are 0.0812 0.0822
## BIC = 72661.21
## Fit based upon off diagonal values = 0.938
## Measures of factor score adequacy
## PA1 PA3 PA4 PA2 PA6
## Correlation of (regression) scores with factors 0.977 0.972 0.956 0.929 0.975
## Multiple R square of scores with factors 0.955 0.944 0.914 0.862 0.951
## Minimum correlation of possible factor scores 0.910 0.888 0.828 0.725 0.902
## PA5
## Correlation of (regression) scores with factors 0.914
## Multiple R square of scores with factors 0.836
## Minimum correlation of possible factor scores 0.671
# Reduce indices using |loading| >= .30 and communality >= .15 (Biber, 2006).
PA6_refined <- MD.reduce(PA6, .3, .15)
paged_table(PA6_refined)
# Number of indices retained after reduction (rows with a factor assignment).
PA6_refined %>%
na.omit() %>%
dim()
## [1] 49 3
# Dimension scores for every text in the cleaned data set.
PA6_scores <- MD.scores(PA6, PA6_refined, btr_filtered)
paged_table(PA6_scores)
#write.csv(PA6_scores, "MD_result/Dimensional_score_PA6_5.0_ETSreport_modified.csv")
# Scores by learning environment, faceted by mode. The bare column names
# (learning_environment, mode) resolve via the earlier attach(btr_filtered).
MD.visualize(PA6, PA6_refined, btr_filtered, x = learning_environment, color = learning_environment, grid = ~mode,legend = T, xorder = "nlengths")
## factor_loaded loading communality
## nonfinite_prop PA1 -0.8132950 0.7253723
## cc_clause PA1 0.7742338 0.5873482
## contraction PA1 0.7542997 0.6407286
## emphatics PA1 0.7495712 0.5427764
## be_mv PA1 0.7322963 0.5486265
## pp_demonstrative PA1 0.6335961 0.5565309
## nn_all PA1 -0.6188822 0.8163238
## pp1 PA1 0.6131749 0.4810347
## pp3_it PA1 0.5852639 0.4258781
## mean_verbal_deps PA1 -0.5684253 0.3699637
## factive_adverb PA1 0.5566128 0.3073590
## mlc PA1 -0.4843049 0.5393716
## nn_abstract PA1 -0.4419376 0.3660817
## mltu PA1 -0.4329341 0.3313769
## mattr PA1 -0.4105833 0.2990983
## that_relative_clause PA1 0.3478179 0.3277435
## det_nominal PA1 0.3413118 0.3559768
## adverbial_subordinator_causitive PA1 0.3082464 0.1632622
##
##
## factor_loaded loading communality
## wrd_length PA2 0.5880763 0.7977080
## cc_phrase PA2 0.5613025 0.4137876
## cc_nominal PA2 0.5260028 0.2808598
## nominalization PA2 0.4980983 0.4664133
## jj_attributive PA2 0.4408125 0.4063362
## amod_nominal PA2 0.3734195 0.1939523
## prep_phrase PA2 0.3538703 0.3669912
## adverbial_subordinator_conditional PA2 -0.3301145 0.1789314
## prep_nominal PA2 0.3007542 0.2453117
##
##
## factor_loaded loading communality
## to_clause PA3 0.8032894 0.5551529
## verb PA3 0.6946974 0.7949891
## non_past_tense PA3 0.6651300 0.7967250
## to_clause_verb PA3 0.5146971 0.3993298
## pp2 PA3 0.5144921 0.3540182
## mental_verb PA3 0.5047000 0.3849005
## to_clause_verb_desire PA3 0.4472352 0.2408542
## to_clause_noun PA3 0.4389121 0.2124316
## dc_c PA3 0.4125804 0.5075388
## activity_verb PA3 0.3906763 0.2960751
## poss_nominal PA3 0.3721704 0.2740686
##
##
## factor_loaded loading communality
## that_verb_clause PA4 0.8855422 0.7373146
## that_complement_clause PA4 0.8385487 0.7637723
## that_verb_clause_factive PA4 0.7869121 0.5464035
## ccomp_c PA4 0.4685438 0.3955203
##
##
## factor_loaded loading communality
## pp3 PA5 0.6110859 0.4314571
## past_tense PA5 0.5767110 0.4043281
## nn_animate PA5 0.4394147 0.2878991
##
##
## factor_loaded loading communality
## wh_relative_clause PA6 0.8238232 0.7104070
## relcl_c PA6 0.8071224 0.6962011
## wh_relative_subj_clause PA6 0.7076527 0.5628490
## relcl_nominal PA6 0.5960646 0.7154814
# Scores by text type, faceted by mode x learning environment (bare column
# names resolve via the earlier attach(btr_filtered)).
MD.visualize(PA6, PA6_refined, btr_filtered, x = text_type, color = mode, grid = ~mode+learning_environment, legend = F, xorder = "nlengths")
## factor_loaded loading communality
## nonfinite_prop PA1 -0.8132950 0.7253723
## cc_clause PA1 0.7742338 0.5873482
## contraction PA1 0.7542997 0.6407286
## emphatics PA1 0.7495712 0.5427764
## be_mv PA1 0.7322963 0.5486265
## pp_demonstrative PA1 0.6335961 0.5565309
## nn_all PA1 -0.6188822 0.8163238
## pp1 PA1 0.6131749 0.4810347
## pp3_it PA1 0.5852639 0.4258781
## mean_verbal_deps PA1 -0.5684253 0.3699637
## factive_adverb PA1 0.5566128 0.3073590
## mlc PA1 -0.4843049 0.5393716
## nn_abstract PA1 -0.4419376 0.3660817
## mltu PA1 -0.4329341 0.3313769
## mattr PA1 -0.4105833 0.2990983
## that_relative_clause PA1 0.3478179 0.3277435
## det_nominal PA1 0.3413118 0.3559768
## adverbial_subordinator_causitive PA1 0.3082464 0.1632622
##
##
## factor_loaded loading communality
## wrd_length PA2 0.5880763 0.7977080
## cc_phrase PA2 0.5613025 0.4137876
## cc_nominal PA2 0.5260028 0.2808598
## nominalization PA2 0.4980983 0.4664133
## jj_attributive PA2 0.4408125 0.4063362
## amod_nominal PA2 0.3734195 0.1939523
## prep_phrase PA2 0.3538703 0.3669912
## adverbial_subordinator_conditional PA2 -0.3301145 0.1789314
## prep_nominal PA2 0.3007542 0.2453117
##
##
## factor_loaded loading communality
## to_clause PA3 0.8032894 0.5551529
## verb PA3 0.6946974 0.7949891
## non_past_tense PA3 0.6651300 0.7967250
## to_clause_verb PA3 0.5146971 0.3993298
## pp2 PA3 0.5144921 0.3540182
## mental_verb PA3 0.5047000 0.3849005
## to_clause_verb_desire PA3 0.4472352 0.2408542
## to_clause_noun PA3 0.4389121 0.2124316
## dc_c PA3 0.4125804 0.5075388
## activity_verb PA3 0.3906763 0.2960751
## poss_nominal PA3 0.3721704 0.2740686
##
##
## factor_loaded loading communality
## that_verb_clause PA4 0.8855422 0.7373146
## that_complement_clause PA4 0.8385487 0.7637723
## that_verb_clause_factive PA4 0.7869121 0.5464035
## ccomp_c PA4 0.4685438 0.3955203
##
##
## factor_loaded loading communality
## pp3 PA5 0.6110859 0.4314571
## past_tense PA5 0.5767110 0.4043281
## nn_animate PA5 0.4394147 0.2878991
##
##
## factor_loaded loading communality
## wh_relative_clause PA6 0.8238232 0.7104070
## relcl_c PA6 0.8071224 0.6962011
## wh_relative_subj_clause PA6 0.7076527 0.5628490
## relcl_nominal PA6 0.5960646 0.7154814
# Scores by discipline, faceted mode ~ learning environment (bare column
# names resolve via the earlier attach(btr_filtered)).
MD.visualize(PA6, PA6_refined, btr_filtered, x = discipline, color = discipline, grid = mode~learning_environment, legend = F, xorder = "nlengths")
## factor_loaded loading communality
## nonfinite_prop PA1 -0.8132950 0.7253723
## cc_clause PA1 0.7742338 0.5873482
## contraction PA1 0.7542997 0.6407286
## emphatics PA1 0.7495712 0.5427764
## be_mv PA1 0.7322963 0.5486265
## pp_demonstrative PA1 0.6335961 0.5565309
## nn_all PA1 -0.6188822 0.8163238
## pp1 PA1 0.6131749 0.4810347
## pp3_it PA1 0.5852639 0.4258781
## mean_verbal_deps PA1 -0.5684253 0.3699637
## factive_adverb PA1 0.5566128 0.3073590
## mlc PA1 -0.4843049 0.5393716
## nn_abstract PA1 -0.4419376 0.3660817
## mltu PA1 -0.4329341 0.3313769
## mattr PA1 -0.4105833 0.2990983
## that_relative_clause PA1 0.3478179 0.3277435
## det_nominal PA1 0.3413118 0.3559768
## adverbial_subordinator_causitive PA1 0.3082464 0.1632622
##
##
## factor_loaded loading communality
## wrd_length PA2 0.5880763 0.7977080
## cc_phrase PA2 0.5613025 0.4137876
## cc_nominal PA2 0.5260028 0.2808598
## nominalization PA2 0.4980983 0.4664133
## jj_attributive PA2 0.4408125 0.4063362
## amod_nominal PA2 0.3734195 0.1939523
## prep_phrase PA2 0.3538703 0.3669912
## adverbial_subordinator_conditional PA2 -0.3301145 0.1789314
## prep_nominal PA2 0.3007542 0.2453117
##
##
## factor_loaded loading communality
## to_clause PA3 0.8032894 0.5551529
## verb PA3 0.6946974 0.7949891
## non_past_tense PA3 0.6651300 0.7967250
## to_clause_verb PA3 0.5146971 0.3993298
## pp2 PA3 0.5144921 0.3540182
## mental_verb PA3 0.5047000 0.3849005
## to_clause_verb_desire PA3 0.4472352 0.2408542
## to_clause_noun PA3 0.4389121 0.2124316
## dc_c PA3 0.4125804 0.5075388
## activity_verb PA3 0.3906763 0.2960751
## poss_nominal PA3 0.3721704 0.2740686
##
##
## factor_loaded loading communality
## that_verb_clause PA4 0.8855422 0.7373146
## that_complement_clause PA4 0.8385487 0.7637723
## that_verb_clause_factive PA4 0.7869121 0.5464035
## ccomp_c PA4 0.4685438 0.3955203
##
##
## factor_loaded loading communality
## pp3 PA5 0.6110859 0.4314571
## past_tense PA5 0.5767110 0.4043281
## nn_animate PA5 0.4394147 0.2878991
##
##
## factor_loaded loading communality
## wh_relative_clause PA6 0.8238232 0.7104070
## relcl_c PA6 0.8071224 0.6962011
## wh_relative_subj_clause PA6 0.7076527 0.5628490
## relcl_nominal PA6 0.5960646 0.7154814
The seven-factor solution fits somewhat better, with only slight changes. We may prioritize interpretability, since no solution is clearly favored in a statistical sense. - Explained 33% of the variance. - The root mean square of the residuals (RMSR) is 0.04. - The df-corrected root mean square of the residuals is 0.04. - Tucker-Lewis Index of factoring reliability = 0.503. - RMSEA index = 0.08, with a 95% confidence interval of 0.079 to 0.08.
# Seven-factor solution for comparison with PA6: same extraction/rotation
# settings, plus Bartlett factor scores.
PA7 <- fa(btr_filtered[8:n_col],nfactors= 7, n.iter=1, rotate="Promax", scores="Bartlett",
residuals=TRUE, SMC=TRUE, missing=TRUE,impute="median",
min.err = 0.001, max.iter = 50, symmetric=TRUE, warnings=TRUE, fm="pa",
alpha=.05, p=.05, oblique.scores=TRUE)
# Sort items by loading size for readability.
PA7 <- fa.sort(PA7, polar = FALSE)
print(PA7)
## Factor Analysis using method = pa
## Call: fa(r = btr_filtered[8:n_col], nfactors = 7, n.iter = 1, rotate = "Promax",
## scores = "Bartlett", residuals = TRUE, SMC = TRUE, missing = TRUE,
## impute = "median", min.err = 0.001, max.iter = 50, symmetric = TRUE,
## warnings = TRUE, fm = "pa", alpha = 0.05, p = 0.05, oblique.scores = TRUE)
## Standardized loadings (pattern matrix) based upon correlation matrix
## PA1 PA3 PA4 PA2 PA7 PA5 PA6
## nonfinite_prop -0.82 0.15 -0.05 0.04 -0.18 -0.16 0.00
## contraction 0.82 0.02 -0.16 -0.04 -0.12 -0.02 0.04
## cc_clause 0.80 -0.09 -0.01 0.04 -0.08 0.02 0.10
## emphatics 0.78 -0.03 -0.05 0.01 -0.04 -0.08 0.12
## be_mv 0.75 -0.01 0.02 -0.01 0.13 0.01 0.07
## wrd_length -0.73 0.00 -0.03 -0.02 0.28 0.06 0.31
## pp_demonstrative 0.69 0.03 0.11 0.02 -0.01 -0.14 0.00
## nn_all -0.65 -0.41 -0.04 -0.08 -0.12 0.00 -0.02
## pp3_it 0.64 0.04 -0.09 -0.02 0.02 0.09 -0.06
## pp1 0.62 0.08 0.03 -0.08 -0.10 0.03 0.12
## mean_verbal_deps -0.56 0.09 0.01 0.05 -0.38 -0.08 -0.02
## nn_abstract -0.53 0.03 0.04 -0.02 0.04 -0.14 0.22
## nominalization -0.53 -0.04 0.09 -0.07 0.17 -0.07 0.34
## factive_adverb 0.52 -0.01 -0.03 0.09 -0.03 0.08 0.19
## mattr -0.51 0.12 -0.01 0.03 0.10 0.27 0.06
## mlc -0.50 -0.43 0.01 0.03 -0.47 -0.09 0.03
## det_nominal 0.42 0.07 0.16 0.16 0.04 -0.09 -0.16
## to_clause_noun -0.42 0.41 0.03 0.06 -0.04 0.08 0.00
## prep_phrase -0.33 -0.20 0.16 0.14 0.24 0.07 0.13
## adverbial_subordinator_causitive 0.33 0.02 0.05 0.03 0.08 0.09 -0.06
## pp_indefinite 0.32 0.08 -0.09 0.04 -0.08 0.14 -0.06
## discourse_particle 0.32 0.08 0.18 -0.02 0.04 -0.10 -0.12
## time_adverbials 0.31 0.01 0.14 0.03 -0.11 -0.12 -0.15
## past_participial_clause -0.27 -0.01 0.01 0.05 0.16 -0.13 -0.05
## complementizer_that0 0.27 0.04 0.06 -0.12 -0.10 0.23 0.04
## nn_cognitive -0.26 0.03 -0.03 -0.01 0.03 -0.01 0.19
## jj_predicative 0.25 0.18 0.08 -0.07 0.12 0.06 0.10
## amplifiers_adverb 0.24 -0.01 -0.06 0.02 0.09 0.09 0.09
## place_adverbials 0.24 0.02 0.01 0.00 -0.14 -0.14 -0.13
## pv_do 0.21 0.14 0.02 -0.02 -0.02 0.04 0.02
## all_phrasal_verbs 0.15 0.01 -0.01 -0.01 -0.09 0.12 -0.07
## downtoners_adverb 0.14 0.01 -0.01 0.06 0.11 -0.05 -0.03
## verb 0.23 0.84 0.02 0.02 0.14 0.02 -0.10
## to_clause -0.18 0.76 -0.02 0.07 -0.07 0.03 0.06
## non_past_tense 0.39 0.70 -0.04 -0.01 -0.03 -0.24 0.05
## to_clause_verb 0.23 0.51 -0.05 0.05 -0.04 -0.05 0.04
## activity_verb 0.04 0.43 -0.16 0.06 -0.17 -0.01 -0.27
## modal_possibility 0.05 0.41 0.05 0.04 0.20 -0.18 0.03
## to_clause_verb_desire 0.11 0.40 -0.01 0.00 -0.09 -0.05 0.07
## pp2 -0.05 0.39 -0.15 -0.02 -0.35 -0.05 0.00
## mental_verb 0.11 0.39 0.17 -0.08 -0.13 0.16 0.13
## mean_nominal_deps -0.01 -0.36 -0.10 0.17 -0.02 -0.17 0.14
## prep_nominal -0.05 -0.33 0.16 0.12 0.16 0.05 0.19
## to_clause_verb_to_causative -0.01 0.27 -0.09 0.07 -0.03 0.12 0.00
## wh_clause 0.07 0.25 0.09 -0.05 0.03 -0.03 0.11
## to_clause_adjective -0.13 0.25 0.00 0.00 0.04 0.07 0.03
## modal_predictive -0.17 0.24 -0.02 -0.04 0.04 -0.11 -0.02
## causation_verb -0.07 0.23 0.04 -0.04 0.10 -0.14 0.09
## split_aux 0.15 0.23 0.00 0.04 0.07 -0.05 0.04
## adverbial_subordinator_other -0.02 0.22 0.10 0.02 0.10 0.04 -0.05
## aspectual_verb -0.15 0.22 0.00 0.04 -0.12 0.02 -0.03
## that_verb_clause -0.05 -0.02 0.92 -0.10 -0.05 0.01 -0.10
## that_complement_clause 0.05 -0.04 0.92 -0.06 -0.07 0.06 -0.04
## that_verb_clause_factive -0.11 0.03 0.78 -0.10 -0.05 -0.09 -0.16
## ccomp_c 0.11 0.14 0.54 -0.13 -0.11 0.05 0.12
## that_noun_clause 0.04 -0.04 0.27 0.10 -0.02 0.08 0.06
## wh_question 0.00 0.04 -0.19 -0.13 -0.02 -0.01 -0.01
## relcl_c -0.01 0.02 -0.16 0.85 -0.03 -0.11 0.14
## wh_relative_clause -0.10 -0.02 -0.03 0.75 0.06 0.30 -0.42
## wh_relative_subj_clause -0.08 -0.01 -0.03 0.64 0.05 0.33 -0.38
## relcl_nominal 0.44 0.22 -0.09 0.63 -0.02 -0.01 0.12
## dc_c 0.05 0.23 0.35 0.47 -0.17 -0.04 0.13
## that_relative_clause 0.28 0.10 -0.10 0.38 0.15 -0.19 0.29
## wh_relative_prep_clause -0.06 -0.03 0.03 0.29 0.04 0.00 -0.07
## jj_attributive -0.26 0.00 -0.13 0.01 0.61 -0.19 0.25
## amod_nominal 0.02 0.04 -0.11 0.05 0.52 -0.09 0.25
## mltu -0.49 -0.29 0.16 0.26 -0.51 -0.11 0.12
## agentless_passive -0.24 0.14 0.08 0.02 0.37 -0.10 -0.15
## by_passive -0.06 -0.03 -0.02 0.06 0.27 -0.02 -0.06
## existence_verb -0.06 0.15 0.14 0.01 0.24 -0.14 0.03
## adverbial_subordinator_conditional 0.10 0.16 0.04 -0.01 -0.21 -0.13 -0.19
## occurrence_verb 0.05 0.00 0.01 0.01 0.17 0.02 0.07
## nn_place -0.09 -0.09 -0.08 -0.07 0.16 0.06 0.10
## pp3 0.09 -0.03 0.02 0.05 -0.07 0.66 -0.13
## past_tense 0.18 -0.22 0.09 0.03 -0.04 0.59 -0.23
## nn_animate -0.21 -0.03 -0.04 0.03 -0.11 0.56 0.08
## poss_nominal -0.13 0.19 -0.13 -0.02 -0.33 0.42 0.02
## nn_technical -0.12 -0.06 -0.01 -0.02 0.08 -0.27 -0.12
## nn_group -0.12 -0.11 -0.04 0.02 0.01 0.25 0.14
## perfect_aspect 0.04 0.07 0.10 0.02 0.07 0.24 -0.05
## communication_verb 0.01 0.19 0.09 -0.01 -0.09 0.20 -0.04
## nn_quantity 0.01 -0.10 -0.01 -0.02 -0.07 -0.14 -0.09
## cc_nominal -0.14 -0.01 -0.10 -0.01 0.16 -0.02 0.47
## cc_phrase -0.40 0.03 -0.07 -0.03 0.14 0.05 0.43
## nn_concrete -0.12 0.02 -0.10 0.00 -0.03 -0.16 -0.18
## conjuncts_adverb -0.10 -0.02 0.10 0.05 0.02 -0.10 -0.14
## h2 u2 com
## nonfinite_prop 0.724 0.28 1.3
## contraction 0.643 0.36 1.1
## cc_clause 0.606 0.39 1.1
## emphatics 0.548 0.45 1.1
## be_mv 0.548 0.45 1.1
## wrd_length 0.801 0.20 1.7
## pp_demonstrative 0.556 0.44 1.1
## nn_all 0.830 0.17 1.8
## pp3_it 0.426 0.57 1.1
## pp1 0.491 0.51 1.2
## mean_verbal_deps 0.411 0.59 1.9
## nn_abstract 0.365 0.63 1.5
## nominalization 0.463 0.54 2.1
## factive_adverb 0.325 0.68 1.4
## mattr 0.298 0.70 1.8
## mlc 0.686 0.31 3.0
## det_nominal 0.358 0.64 2.2
## to_clause_noun 0.212 0.79 2.2
## prep_phrase 0.364 0.64 4.2
## adverbial_subordinator_causitive 0.164 0.84 1.4
## pp_indefinite 0.169 0.83 1.9
## discourse_particle 0.227 0.77 2.4
## time_adverbials 0.209 0.79 2.6
## past_participial_clause 0.144 0.86 2.3
## complementizer_that0 0.199 0.80 2.9
## nn_cognitive 0.119 0.88 1.9
## jj_predicative 0.174 0.83 3.4
## amplifiers_adverb 0.076 0.92 2.1
## place_adverbials 0.130 0.87 3.0
## pv_do 0.100 0.90 1.9
## all_phrasal_verbs 0.054 0.95 3.2
## downtoners_adverb 0.033 0.97 2.6
## verb 0.891 0.11 1.2
## to_clause 0.558 0.44 1.2
## non_past_tense 0.822 0.18 1.8
## to_clause_verb 0.402 0.60 1.5
## activity_verb 0.309 0.69 2.5
## modal_possibility 0.187 0.81 2.0
## to_clause_verb_desire 0.239 0.76 1.3
## pp2 0.359 0.64 2.4
## mental_verb 0.396 0.60 2.6
## mean_nominal_deps 0.234 0.77 2.5
## prep_nominal 0.255 0.74 3.2
## to_clause_verb_to_causative 0.096 0.90 1.9
## wh_clause 0.107 0.89 2.1
## to_clause_adjective 0.060 0.94 1.8
## modal_predictive 0.065 0.93 2.5
## causation_verb 0.057 0.94 2.8
## split_aux 0.089 0.91 2.3
## adverbial_subordinator_other 0.079 0.92 2.1
## aspectual_verb 0.067 0.93 2.6
## that_verb_clause 0.782 0.22 1.1
## that_complement_clause 0.850 0.15 1.0
## that_verb_clause_factive 0.553 0.45 1.2
## ccomp_c 0.439 0.56 1.6
## that_noun_clause 0.118 0.88 1.7
## wh_question 0.065 0.93 1.9
## relcl_c 0.719 0.28 1.2
## wh_relative_clause 0.707 0.29 2.0
## wh_relative_subj_clause 0.560 0.44 2.3
## relcl_nominal 0.721 0.28 2.2
## dc_c 0.584 0.42 2.9
## that_relative_clause 0.321 0.68 4.2
## wh_relative_prep_clause 0.094 0.91 1.3
## jj_attributive 0.543 0.46 2.1
## amod_nominal 0.275 0.72 1.7
## mltu 0.574 0.43 3.6
## agentless_passive 0.217 0.78 2.8
## by_passive 0.106 0.89 1.4
## existence_verb 0.090 0.91 3.3
## adverbial_subordinator_conditional 0.178 0.82 4.2
## occurrence_verb 0.036 0.96 1.5
## nn_place 0.086 0.91 4.5
## pp3 0.445 0.56 1.2
## past_tense 0.413 0.59 1.9
## nn_animate 0.331 0.67 1.4
## poss_nominal 0.322 0.68 2.8
## nn_technical 0.151 0.85 2.1
## nn_group 0.123 0.88 2.7
## perfect_aspect 0.117 0.88 2.0
## communication_verb 0.124 0.88 2.9
## nn_quantity 0.054 0.95 3.0
## cc_nominal 0.279 0.72 1.5
## cc_phrase 0.411 0.59 2.3
## nn_concrete 0.104 0.90 3.5
## conjuncts_adverb 0.048 0.95 3.9
##
## PA1 PA3 PA4 PA2 PA7 PA5 PA6
## SS loadings 10.36 5.05 3.29 2.79 2.46 2.42 1.90
## Proportion Var 0.12 0.06 0.04 0.03 0.03 0.03 0.02
## Cumulative Var 0.12 0.18 0.22 0.25 0.28 0.31 0.33
## Proportion Explained 0.37 0.18 0.12 0.10 0.09 0.09 0.07
## Cumulative Proportion 0.37 0.55 0.66 0.76 0.85 0.93 1.00
##
## With factor correlations of
## PA1 PA3 PA4 PA2 PA7 PA5 PA6
## PA1 1.00 0.41 0.40 0.07 -0.12 0.19 -0.11
## PA3 0.41 1.00 0.26 0.01 -0.29 0.18 0.05
## PA4 0.40 0.26 1.00 0.26 0.17 0.14 -0.01
## PA2 0.07 0.01 0.26 1.00 0.23 0.13 0.21
## PA7 -0.12 -0.29 0.17 0.23 1.00 0.20 -0.12
## PA5 0.19 0.18 0.14 0.13 0.20 1.00 0.20
## PA6 -0.11 0.05 -0.01 0.21 -0.12 0.20 1.00
##
## Mean item complexity = 2.2
## Test of the hypothesis that 7 factors are sufficient.
##
## The degrees of freedom for the null model are 3655 and the objective function was 47.62 with Chi Square of 215834.4
## The degrees of freedom for the model are 3074 and the objective function was 20.21
##
## The root mean square of the residuals (RMSR) is 0.04
## The df corrected root mean square of the residuals is 0.04
##
## The harmonic number of observations is 4563 with the empirical chi square 53053.04 with prob < 0
## The total number of observations was 4563 with Likelihood Chi Square = 91524.66 with prob < 0
##
## Tucker Lewis Index of factoring reliability = 0.504
## RMSEA index = 0.079 and the 95 % confidence intervals are 0.079 0.08
## BIC = 65623.94
## Fit based upon off diagonal values = 0.95
## Measures of factor score adequacy
## PA1 PA3 PA4 PA2 PA7 PA5
## Correlation of (regression) scores with factors 0.98 0.99 0.97 0.98 0.94 0.91
## Multiple R square of scores with factors 0.96 0.98 0.94 0.95 0.88 0.83
## Minimum correlation of possible factor scores 0.93 0.96 0.88 0.91 0.76 0.67
## PA6
## Correlation of (regression) scores with factors 0.91
## Multiple R square of scores with factors 0.83
## Minimum correlation of possible factor scores 0.66
# Keep only salient loadings from the 7-factor solution:
# |loading| >= 0.3, with a 0.15 gap required over cross-loadings.
PA7_reduced <- MD.reduce(PA7, 0.3, 0.15)
paged_table(PA7_reduced)
# How many indices were retained (rows after dropping NA loadings)?
dim(na.omit(PA7_reduced))
## [1] 54 3
# Compute per-text dimension scores from the retained loadings, then
# visualize the 7-factor solution by text type, colored by mode.
PA7_scores <- MD.scores(PA7, PA7_reduced, btr_filtered)
paged_table(PA7_scores)
# NOTE(review): prefer legend = FALSE over the reassignable alias F.
MD.visualize(PA7, PA7_reduced, btr_filtered, x = text_type, color = mode, grid = ~mode+learning_environment, legend = F)
## factor_loaded loading communality
## nonfinite_prop PA1 -0.8235353 0.7244426
## contraction PA1 0.8186383 0.6431457
## cc_clause PA1 0.7961354 0.6064587
## emphatics PA1 0.7795884 0.5482211
## be_mv PA1 0.7459612 0.5479498
## wrd_length PA1 -0.7287759 0.8014540
## pp_demonstrative PA1 0.6948488 0.5562340
## nn_all PA1 -0.6544929 0.8300732
## pp3_it PA1 0.6386236 0.4259388
## pp1 PA1 0.6247616 0.4914034
## mean_verbal_deps PA1 -0.5596111 0.4105018
## nn_abstract PA1 -0.5310264 0.3650471
## nominalization PA1 -0.5275952 0.4631378
## factive_adverb PA1 0.5214767 0.3249478
## mattr PA1 -0.5053301 0.2980419
## mlc PA1 -0.4958467 0.6864842
## det_nominal PA1 0.4191834 0.3575159
## to_clause_noun PA1 -0.4157106 0.2123137
## prep_phrase PA1 -0.3286525 0.3639326
## adverbial_subordinator_causitive PA1 0.3261532 0.1637591
## pp_indefinite PA1 0.3241266 0.1685381
## discourse_particle PA1 0.3163154 0.2265938
## time_adverbials PA1 0.3125315 0.2088412
##
##
## factor_loaded loading communality
## relcl_c PA2 0.8461425 0.7192315
## wh_relative_clause PA2 0.7474298 0.7072877
## wh_relative_subj_clause PA2 0.6364631 0.5601752
## relcl_nominal PA2 0.6262247 0.7207488
## dc_c PA2 0.4661064 0.5837262
## that_relative_clause PA2 0.3770284 0.3211368
##
##
## factor_loaded loading communality
## verb PA3 0.8366189 0.8913202
## to_clause PA3 0.7636545 0.5577178
## non_past_tense PA3 0.7048001 0.8220477
## to_clause_verb PA3 0.5073746 0.4021488
## activity_verb PA3 0.4300264 0.3092743
## modal_possibility PA3 0.4107499 0.1870198
## to_clause_verb_desire PA3 0.4047185 0.2393614
## pp2 PA3 0.3903002 0.3594773
## mental_verb PA3 0.3888151 0.3958738
## mean_nominal_deps PA3 -0.3553521 0.2337824
## prep_nominal PA3 -0.3286380 0.2553694
##
##
## factor_loaded loading communality
## that_verb_clause PA4 0.9243763 0.7824317
## that_complement_clause PA4 0.9231932 0.8495170
## that_verb_clause_factive PA4 0.7799246 0.5533175
## ccomp_c PA4 0.5438557 0.4391453
##
##
## factor_loaded loading communality
## pp3 PA5 0.6636445 0.4447154
## past_tense PA5 0.5888479 0.4128245
## nn_animate PA5 0.5617818 0.3306635
## poss_nominal PA5 0.4224054 0.3220701
##
##
## factor_loaded loading communality
## cc_nominal PA6 0.4683098 0.2792157
## cc_phrase PA6 0.4272668 0.4114733
##
##
## factor_loaded loading communality
## jj_attributive PA7 0.6090223 0.5425127
## amod_nominal PA7 0.5180351 0.2752260
## mltu PA7 -0.5138994 0.5736179
## agentless_passive PA7 0.3684972 0.2165383
MD.visualize(PA7, PA7_reduced, btr_filtered, x = discipline, color = discipline, grid = mode~learning_environment, legend = F)
## factor_loaded loading communality
## nonfinite_prop PA1 -0.8235353 0.7244426
## contraction PA1 0.8186383 0.6431457
## cc_clause PA1 0.7961354 0.6064587
## emphatics PA1 0.7795884 0.5482211
## be_mv PA1 0.7459612 0.5479498
## wrd_length PA1 -0.7287759 0.8014540
## pp_demonstrative PA1 0.6948488 0.5562340
## nn_all PA1 -0.6544929 0.8300732
## pp3_it PA1 0.6386236 0.4259388
## pp1 PA1 0.6247616 0.4914034
## mean_verbal_deps PA1 -0.5596111 0.4105018
## nn_abstract PA1 -0.5310264 0.3650471
## nominalization PA1 -0.5275952 0.4631378
## factive_adverb PA1 0.5214767 0.3249478
## mattr PA1 -0.5053301 0.2980419
## mlc PA1 -0.4958467 0.6864842
## det_nominal PA1 0.4191834 0.3575159
## to_clause_noun PA1 -0.4157106 0.2123137
## prep_phrase PA1 -0.3286525 0.3639326
## adverbial_subordinator_causitive PA1 0.3261532 0.1637591
## pp_indefinite PA1 0.3241266 0.1685381
## discourse_particle PA1 0.3163154 0.2265938
## time_adverbials PA1 0.3125315 0.2088412
##
##
## factor_loaded loading communality
## relcl_c PA2 0.8461425 0.7192315
## wh_relative_clause PA2 0.7474298 0.7072877
## wh_relative_subj_clause PA2 0.6364631 0.5601752
## relcl_nominal PA2 0.6262247 0.7207488
## dc_c PA2 0.4661064 0.5837262
## that_relative_clause PA2 0.3770284 0.3211368
##
##
## factor_loaded loading communality
## verb PA3 0.8366189 0.8913202
## to_clause PA3 0.7636545 0.5577178
## non_past_tense PA3 0.7048001 0.8220477
## to_clause_verb PA3 0.5073746 0.4021488
## activity_verb PA3 0.4300264 0.3092743
## modal_possibility PA3 0.4107499 0.1870198
## to_clause_verb_desire PA3 0.4047185 0.2393614
## pp2 PA3 0.3903002 0.3594773
## mental_verb PA3 0.3888151 0.3958738
## mean_nominal_deps PA3 -0.3553521 0.2337824
## prep_nominal PA3 -0.3286380 0.2553694
##
##
## factor_loaded loading communality
## that_verb_clause PA4 0.9243763 0.7824317
## that_complement_clause PA4 0.9231932 0.8495170
## that_verb_clause_factive PA4 0.7799246 0.5533175
## ccomp_c PA4 0.5438557 0.4391453
##
##
## factor_loaded loading communality
## pp3 PA5 0.6636445 0.4447154
## past_tense PA5 0.5888479 0.4128245
## nn_animate PA5 0.5617818 0.3306635
## poss_nominal PA5 0.4224054 0.3220701
##
##
## factor_loaded loading communality
## cc_nominal PA6 0.4683098 0.2792157
## cc_phrase PA6 0.4272668 0.4114733
##
##
## factor_loaded loading communality
## jj_attributive PA7 0.6090223 0.5425127
## amod_nominal PA7 0.5180351 0.2752260
## mltu PA7 -0.5138994 0.5736179
## agentless_passive PA7 0.3684972 0.2165383
New analysis: Heywood cases. - PA with 8 factors explained 35% of the total variance - The root mean square of the residuals (RMSR) is 0.04 - The df corrected root mean square of the residuals is 0.04 - Tucker Lewis Index of factoring reliability = 0.521 - RMSEA index = 0.079 and the 95 % confidence intervals are 0.078 0.079
# Re-run the principal-axis EFA with 8 factors (Promax rotation) on the
# linguistic-index columns (8:n_col; n_col is defined earlier in the file).
PA8 <- fa(btr_filtered[8:n_col],nfactors= 8, n.iter=1, rotate="Promax",
residuals=TRUE, SMC=TRUE, missing=TRUE,impute="median",
min.err = 0.001, max.iter = 50, symmetric=TRUE, warnings=TRUE, fm="pa",
alpha=.05, p=.05, oblique.scores=TRUE)
#PA8 <- fa.sort(PA8, polar = FALSE)
#print(PA8)
#PA8_reduced <- MD.reduce(PA8, .3, .15)
#paged_table(PA8_reduced)
#PA8_reduced %>%
# na.omit() %>%
#dim()
#PA8_scores <- MD.scores(PA8, PA8_reduced, btr_filtered)
#paged_table(PA8_scores)
# NOTE(review): this plots the stats::screeplot *function object*, not a
# scree-analysis result -- a saved scree object was probably intended; confirm.
plot(screeplot)
# Cumulative variance explained by the 6-, 7-, and 8-factor solutions
# (row 3 of $Vaccounted is "Cumulative Var"; cf. the printed table above).
data.frame(VarianceExplained = c(PA6$Vaccounted[3,6], PA7$Vaccounted[3,7], PA8$Vaccounted[3,8]), row.names = c('6-factor', '7-factor', '8-factor'))
# NOTE(review): PA6_refined is not defined in this section of the file --
# confirm it exists (the naming pattern elsewhere is PA*_reduced).
MD.visualize(PA6, PA6_refined, btr_filtered, x = discipline, color = discipline, grid = mode~learning_environment, legend = F)
## factor_loaded loading communality
## nonfinite_prop PA1 -0.8132950 0.7253723
## cc_clause PA1 0.7742338 0.5873482
## contraction PA1 0.7542997 0.6407286
## emphatics PA1 0.7495712 0.5427764
## be_mv PA1 0.7322963 0.5486265
## pp_demonstrative PA1 0.6335961 0.5565309
## nn_all PA1 -0.6188822 0.8163238
## pp1 PA1 0.6131749 0.4810347
## pp3_it PA1 0.5852639 0.4258781
## mean_verbal_deps PA1 -0.5684253 0.3699637
## factive_adverb PA1 0.5566128 0.3073590
## mlc PA1 -0.4843049 0.5393716
## nn_abstract PA1 -0.4419376 0.3660817
## mltu PA1 -0.4329341 0.3313769
## mattr PA1 -0.4105833 0.2990983
## that_relative_clause PA1 0.3478179 0.3277435
## det_nominal PA1 0.3413118 0.3559768
## adverbial_subordinator_causitive PA1 0.3082464 0.1632622
##
##
## factor_loaded loading communality
## wrd_length PA2 0.5880763 0.7977080
## cc_phrase PA2 0.5613025 0.4137876
## cc_nominal PA2 0.5260028 0.2808598
## nominalization PA2 0.4980983 0.4664133
## jj_attributive PA2 0.4408125 0.4063362
## amod_nominal PA2 0.3734195 0.1939523
## prep_phrase PA2 0.3538703 0.3669912
## adverbial_subordinator_conditional PA2 -0.3301145 0.1789314
## prep_nominal PA2 0.3007542 0.2453117
##
##
## factor_loaded loading communality
## to_clause PA3 0.8032894 0.5551529
## verb PA3 0.6946974 0.7949891
## non_past_tense PA3 0.6651300 0.7967250
## to_clause_verb PA3 0.5146971 0.3993298
## pp2 PA3 0.5144921 0.3540182
## mental_verb PA3 0.5047000 0.3849005
## to_clause_verb_desire PA3 0.4472352 0.2408542
## to_clause_noun PA3 0.4389121 0.2124316
## dc_c PA3 0.4125804 0.5075388
## activity_verb PA3 0.3906763 0.2960751
## poss_nominal PA3 0.3721704 0.2740686
##
##
## factor_loaded loading communality
## that_verb_clause PA4 0.8855422 0.7373146
## that_complement_clause PA4 0.8385487 0.7637723
## that_verb_clause_factive PA4 0.7869121 0.5464035
## ccomp_c PA4 0.4685438 0.3955203
##
##
## factor_loaded loading communality
## pp3 PA5 0.6110859 0.4314571
## past_tense PA5 0.5767110 0.4043281
## nn_animate PA5 0.4394147 0.2878991
##
##
## factor_loaded loading communality
## wh_relative_clause PA6 0.8238232 0.7104070
## relcl_c PA6 0.8071224 0.6962011
## wh_relative_subj_clause PA6 0.7076527 0.5628490
## relcl_nominal PA6 0.5960646 0.7154814
RQ1) What are the linguistic features of technology-mediated learning environments that are encountered and produced?
RQ2) How (dis)similar are the linguistic features of technology-mediated learning environments and the academic registers represented by T2K-SWAL with regard to mode, register, and discipline?
# Preview the 6-factor scores and summarize them by corpus.
head(PA6_scores)
# NOTE(review): the bare `learning_environment` resolves through an earlier
# attach() -- confirm it is the grouping column of PA6_scores.
describeBy(PA6_scores, group = learning_environment)
##
## Descriptive statistics by group
## group: T2K-SWAL
## vars n mean sd median trimmed mad min
## filename* 1 462 231.50 133.51 231.50 231.50 171.24 1.00
## learning_environment* 2 462 1.00 0.00 1.00 1.00 0.00 1.00
## mode* 3 462 1.37 0.48 1.00 1.34 0.00 1.00
## discipline* 4 462 4.45 2.37 4.00 4.44 2.97 1.00
## subdiscipline* 5 462 37.22 22.65 38.00 36.96 32.62 1.00
## text_type* 6 462 5.87 2.76 5.00 5.94 2.97 1.00
## PA1 7 462 6.01 11.80 10.12 6.48 11.48 -57.92
## PA3 8 462 -0.19 4.96 0.18 -0.04 5.05 -13.32
## PA4 9 462 -0.37 1.59 -0.38 -0.40 1.54 -3.86
## PA2 10 462 -1.49 6.82 -3.47 -1.92 6.72 -13.05
## PA6 11 462 -0.32 1.58 -0.47 -0.36 1.56 -4.56
## PA5 12 462 0.63 1.85 0.37 0.47 1.89 -2.45
## max range skew kurtosis se
## filename* 462.00 461.00 0.00 -1.21 6.21
## learning_environment* 1.00 0.00 NaN NaN 0.00
## mode* 2.00 1.00 0.53 -1.73 0.02
## discipline* 8.00 7.00 0.14 -1.15 0.11
## subdiscipline* 75.00 74.00 0.05 -1.31 1.05
## text_type* 10.00 9.00 0.08 -0.89 0.13
## PA1 27.63 85.55 -0.62 0.32 0.55
## PA3 15.23 28.55 -0.22 -0.33 0.23
## PA4 4.32 8.17 0.19 -0.25 0.07
## PA2 15.62 28.67 0.53 -0.76 0.32
## PA6 6.17 10.72 0.37 0.45 0.07
## PA5 9.29 11.75 0.92 1.27 0.09
## ------------------------------------------------------------
## group: TMLE
## vars n mean sd median trimmed mad min
## filename* 1 4101 2051.00 1184.00 2051.00 2051.00 1519.66 1.00
## learning_environment* 2 4101 1.00 0.00 1.00 1.00 0.00 1.00
## mode* 3 4101 1.46 0.50 1.00 1.44 0.00 1.00
## discipline* 4 4101 3.25 1.83 3.00 3.19 2.97 1.00
## subdiscipline* 5 4070 284.24 137.12 308.00 285.17 148.26 1.00
## text_type* 6 4101 4.52 1.61 5.00 4.59 0.00 1.00
## PA1 7 4101 -0.68 11.12 -0.80 -0.51 12.73 -51.34
## PA3 8 4101 0.02 6.65 -0.01 -0.06 6.40 -21.27
## PA4 9 4101 0.04 3.49 -0.58 -0.36 2.99 -4.33
## PA2 10 4101 0.17 5.29 -0.17 0.03 4.99 -23.30
## PA6 11 4101 0.04 3.25 -0.32 -0.22 2.85 -4.56
## PA5 12 4101 -0.07 2.23 -0.66 -0.37 1.94 -2.64
## max range skew kurtosis se
## filename* 4101.00 4100.00 0.00 -1.20 18.49
## learning_environment* 1.00 0.00 NaN NaN 0.00
## mode* 2.00 1.00 0.18 -1.97 0.01
## discipline* 6.00 5.00 0.16 -1.36 0.03
## subdiscipline* 562.00 561.00 -0.07 -0.93 2.15
## text_type* 8.00 7.00 -0.54 0.55 0.03
## PA1 31.68 83.02 -0.19 -0.22 0.17
## PA3 34.01 55.28 0.20 0.60 0.10
## PA4 30.39 34.72 1.57 4.64 0.05
## PA2 23.90 47.21 0.22 0.67 0.08
## PA6 31.52 36.08 1.53 7.07 0.05
## PA5 14.02 16.66 1.36 2.32 0.03
# "classroom_management_talk" occurs in both corpora; suffix the label with
# its learning environment so the two groups stay distinct in later models.
data_lengths <- dim(PA6_scores)
PA6_scores2 <- PA6_scores
# Vectorized replacement of the original row-by-row loop (same result,
# O(n) with no per-row branching). %in% is NA-safe, unlike == inside if().
is_cmt <- PA6_scores2$text_type %in% "classroom_management_talk"
in_tmle <- is_cmt & PA6_scores2$learning_environment %in% "TMLE"
in_t2k <- is_cmt & PA6_scores2$learning_environment %in% "T2K-SWAL"
PA6_scores2$text_type[in_tmle] <- "classroom_management_talk_tmle"
PA6_scores2$text_type[in_t2k] <- "classroom_management_talk_t2kswal"
# Sanity check: both suffixed labels should now appear.
unique(PA6_scores2$text_type)
## [1] "instructional_reading" "announcements_discussions"
## [3] "assignment_description" "instructional_video"
## [5] "slides" "syllabus"
## [7] "quiz" "classroom_management_talk_tmle"
## [9] "textbooks" "course_packs"
## [11] "course_management" "other_institutional_writing"
## [13] "lecture" "service_encounter"
## [15] "study_group" "lab"
## [17] "classroom_management_talk_t2kswal" "office_hours"
# NOTE(review): attach() is discouraged (masking / stale-binding risk);
# later bare references such as `text_type` resolve through it. Kept as-is
# to preserve the script's behavior.
attach(PA6_scores2)
head(PA6_scores2)
# Convert text_type to a factor, then impose an explicit level order.
# The vector is reversed, so "textbooks" becomes the first level and thus
# the reference category under default treatment coding.
PA6_scores2$text_type <- factor(PA6_scores2$text_type)
PA6_scores2$text_type <- factor(PA6_scores2$text_type, levels = rev(c("classroom_management_talk_tmle", "instructional_video",
"classroom_management_talk_t2kswal", "lab", "lecture"
,"office_hours"
,"service_encounter"
,"study_group"
,"announcements_discussions"
,"assignment_description"
,"instructional_reading"
,"quiz"
,"slides"
,"syllabus"
,"course_management"
,"course_packs"
,"other_institutional_writing"
,"textbooks")))
# Inspect the treatment-coded contrast matrix ("textbooks" = baseline).
contrasts(PA6_scores2$text_type)
## other_institutional_writing course_packs
## textbooks 0 0
## other_institutional_writing 1 0
## course_packs 0 1
## course_management 0 0
## syllabus 0 0
## slides 0 0
## quiz 0 0
## instructional_reading 0 0
## assignment_description 0 0
## announcements_discussions 0 0
## study_group 0 0
## service_encounter 0 0
## office_hours 0 0
## lecture 0 0
## lab 0 0
## classroom_management_talk_t2kswal 0 0
## instructional_video 0 0
## classroom_management_talk_tmle 0 0
## course_management syllabus slides quiz
## textbooks 0 0 0 0
## other_institutional_writing 0 0 0 0
## course_packs 0 0 0 0
## course_management 1 0 0 0
## syllabus 0 1 0 0
## slides 0 0 1 0
## quiz 0 0 0 1
## instructional_reading 0 0 0 0
## assignment_description 0 0 0 0
## announcements_discussions 0 0 0 0
## study_group 0 0 0 0
## service_encounter 0 0 0 0
## office_hours 0 0 0 0
## lecture 0 0 0 0
## lab 0 0 0 0
## classroom_management_talk_t2kswal 0 0 0 0
## instructional_video 0 0 0 0
## classroom_management_talk_tmle 0 0 0 0
## instructional_reading assignment_description
## textbooks 0 0
## other_institutional_writing 0 0
## course_packs 0 0
## course_management 0 0
## syllabus 0 0
## slides 0 0
## quiz 0 0
## instructional_reading 1 0
## assignment_description 0 1
## announcements_discussions 0 0
## study_group 0 0
## service_encounter 0 0
## office_hours 0 0
## lecture 0 0
## lab 0 0
## classroom_management_talk_t2kswal 0 0
## instructional_video 0 0
## classroom_management_talk_tmle 0 0
## announcements_discussions study_group
## textbooks 0 0
## other_institutional_writing 0 0
## course_packs 0 0
## course_management 0 0
## syllabus 0 0
## slides 0 0
## quiz 0 0
## instructional_reading 0 0
## assignment_description 0 0
## announcements_discussions 1 0
## study_group 0 1
## service_encounter 0 0
## office_hours 0 0
## lecture 0 0
## lab 0 0
## classroom_management_talk_t2kswal 0 0
## instructional_video 0 0
## classroom_management_talk_tmle 0 0
## service_encounter office_hours lecture lab
## textbooks 0 0 0 0
## other_institutional_writing 0 0 0 0
## course_packs 0 0 0 0
## course_management 0 0 0 0
## syllabus 0 0 0 0
## slides 0 0 0 0
## quiz 0 0 0 0
## instructional_reading 0 0 0 0
## assignment_description 0 0 0 0
## announcements_discussions 0 0 0 0
## study_group 0 0 0 0
## service_encounter 1 0 0 0
## office_hours 0 1 0 0
## lecture 0 0 1 0
## lab 0 0 0 1
## classroom_management_talk_t2kswal 0 0 0 0
## instructional_video 0 0 0 0
## classroom_management_talk_tmle 0 0 0 0
## classroom_management_talk_t2kswal
## textbooks 0
## other_institutional_writing 0
## course_packs 0
## course_management 0
## syllabus 0
## slides 0
## quiz 0
## instructional_reading 0
## assignment_description 0
## announcements_discussions 0
## study_group 0
## service_encounter 0
## office_hours 0
## lecture 0
## lab 0
## classroom_management_talk_t2kswal 1
## instructional_video 0
## classroom_management_talk_tmle 0
## instructional_video
## textbooks 0
## other_institutional_writing 0
## course_packs 0
## course_management 0
## syllabus 0
## slides 0
## quiz 0
## instructional_reading 0
## assignment_description 0
## announcements_discussions 0
## study_group 0
## service_encounter 0
## office_hours 0
## lecture 0
## lab 0
## classroom_management_talk_t2kswal 0
## instructional_video 1
## classroom_management_talk_tmle 0
## classroom_management_talk_tmle
## textbooks 0
## other_institutional_writing 0
## course_packs 0
## course_management 0
## syllabus 0
## slides 0
## quiz 0
## instructional_reading 0
## assignment_description 0
## announcements_discussions 0
## study_group 0
## service_encounter 0
## office_hours 0
## lecture 0
## lab 0
## classroom_management_talk_t2kswal 0
## instructional_video 0
## classroom_management_talk_tmle 1
head(PA6_scores2)
# Reshape wide factor scores to long form: one row per file x dimension.
# FIX: melt.data.frame's argument is `id.vars`, not `ids` -- the original
# `ids =` fell into `...` and was silently ignored (melt then guessed the
# id columns, which happened to coincide with the intended set).
dim_score_long <- reshape2::melt(PA6_scores2,
                                 id.vars = c("filename", "learning_environment", "mode",
                                             "discipline", "subdiscipline", "text_type"),
                                 value.name = "Dimensional_score",
                                 variable.name = "Dimension")
head(dim_score_long)
# Relabel factors PA* as dimensions DIM* and fix the level order.
dim_score_long$Dimension <- gsub("PA", 'DIM', dim_score_long$Dimension)
dim_score_long$Dimension <- factor(dim_score_long$Dimension, levels = c("DIM1", "DIM2", "DIM3", "DIM4", "DIM5", "DIM6"))
# DIM1 is the reference level under treatment coding.
contrasts(dim_score_long$Dimension)
## DIM2 DIM3 DIM4 DIM5 DIM6
## DIM1 0 0 0 0 0
## DIM2 1 0 0 0 0
## DIM3 0 1 0 0 0
## DIM4 0 0 1 0 0
## DIM5 0 0 0 1 0
## DIM6 0 0 0 0 1
contrasts(dim_score_long$text_type)
## other_institutional_writing course_packs
## textbooks 0 0
## other_institutional_writing 1 0
## course_packs 0 1
## course_management 0 0
## syllabus 0 0
## slides 0 0
## quiz 0 0
## instructional_reading 0 0
## assignment_description 0 0
## announcements_discussions 0 0
## study_group 0 0
## service_encounter 0 0
## office_hours 0 0
## lecture 0 0
## lab 0 0
## classroom_management_talk_t2kswal 0 0
## instructional_video 0 0
## classroom_management_talk_tmle 0 0
## course_management syllabus slides quiz
## textbooks 0 0 0 0
## other_institutional_writing 0 0 0 0
## course_packs 0 0 0 0
## course_management 1 0 0 0
## syllabus 0 1 0 0
## slides 0 0 1 0
## quiz 0 0 0 1
## instructional_reading 0 0 0 0
## assignment_description 0 0 0 0
## announcements_discussions 0 0 0 0
## study_group 0 0 0 0
## service_encounter 0 0 0 0
## office_hours 0 0 0 0
## lecture 0 0 0 0
## lab 0 0 0 0
## classroom_management_talk_t2kswal 0 0 0 0
## instructional_video 0 0 0 0
## classroom_management_talk_tmle 0 0 0 0
## instructional_reading assignment_description
## textbooks 0 0
## other_institutional_writing 0 0
## course_packs 0 0
## course_management 0 0
## syllabus 0 0
## slides 0 0
## quiz 0 0
## instructional_reading 1 0
## assignment_description 0 1
## announcements_discussions 0 0
## study_group 0 0
## service_encounter 0 0
## office_hours 0 0
## lecture 0 0
## lab 0 0
## classroom_management_talk_t2kswal 0 0
## instructional_video 0 0
## classroom_management_talk_tmle 0 0
## announcements_discussions study_group
## textbooks 0 0
## other_institutional_writing 0 0
## course_packs 0 0
## course_management 0 0
## syllabus 0 0
## slides 0 0
## quiz 0 0
## instructional_reading 0 0
## assignment_description 0 0
## announcements_discussions 1 0
## study_group 0 1
## service_encounter 0 0
## office_hours 0 0
## lecture 0 0
## lab 0 0
## classroom_management_talk_t2kswal 0 0
## instructional_video 0 0
## classroom_management_talk_tmle 0 0
## service_encounter office_hours lecture lab
## textbooks 0 0 0 0
## other_institutional_writing 0 0 0 0
## course_packs 0 0 0 0
## course_management 0 0 0 0
## syllabus 0 0 0 0
## slides 0 0 0 0
## quiz 0 0 0 0
## instructional_reading 0 0 0 0
## assignment_description 0 0 0 0
## announcements_discussions 0 0 0 0
## study_group 0 0 0 0
## service_encounter 1 0 0 0
## office_hours 0 1 0 0
## lecture 0 0 1 0
## lab 0 0 0 1
## classroom_management_talk_t2kswal 0 0 0 0
## instructional_video 0 0 0 0
## classroom_management_talk_tmle 0 0 0 0
## classroom_management_talk_t2kswal
## textbooks 0
## other_institutional_writing 0
## course_packs 0
## course_management 0
## syllabus 0
## slides 0
## quiz 0
## instructional_reading 0
## assignment_description 0
## announcements_discussions 0
## study_group 0
## service_encounter 0
## office_hours 0
## lecture 0
## lab 0
## classroom_management_talk_t2kswal 1
## instructional_video 0
## classroom_management_talk_tmle 0
## instructional_video
## textbooks 0
## other_institutional_writing 0
## course_packs 0
## course_management 0
## syllabus 0
## slides 0
## quiz 0
## instructional_reading 0
## assignment_description 0
## announcements_discussions 0
## study_group 0
## service_encounter 0
## office_hours 0
## lecture 0
## lab 0
## classroom_management_talk_t2kswal 0
## instructional_video 1
## classroom_management_talk_tmle 0
## classroom_management_talk_tmle
## textbooks 0
## other_institutional_writing 0
## course_packs 0
## course_management 0
## syllabus 0
## slides 0
## quiz 0
## instructional_reading 0
## assignment_description 0
## announcements_discussions 0
## study_group 0
## service_encounter 0
## office_hours 0
## lecture 0
## lab 0
## classroom_management_talk_t2kswal 0
## instructional_video 0
## classroom_management_talk_tmle 1
# Drop any factor levels left empty after the text-type recoding.
dim_score_long$text_type <- droplevels(dim_score_long$text_type)
# Cross-tabulate files by text type and corpus. The original bare
# xtabs(~text_type+learning_environment) resolved these columns through the
# attach()ed PA6_scores2 (one row per file -- the printed counts match that
# object); the data source is now explicit to remove the attach() dependence.
xtabs(~ text_type + learning_environment, data = PA6_scores2)
## learning_environment
## text_type T2K-SWAL TMLE
## announcements_discussions 0 326
## assignment_description 0 355
## classroom_management_talk_t2kswal 38 0
## classroom_management_talk_tmle 0 26
## course_management 21 0
## course_packs 27 0
## instructional_reading 0 624
## instructional_video 0 2207
## lab 17 0
## lecture 177 0
## office_hours 11 0
## other_institutional_writing 37 0
## quiz 0 246
## service_encounter 22 0
## slides 0 138
## study_group 25 0
## syllabus 0 179
## textbooks 87 0
# NOTE(review): attach() is discouraged (this now masks the earlier
# attach()ed PA6_scores2 columns of the same names); prefer explicit
# dim_score_long$... references. Kept as-is to preserve behavior.
attach(dim_score_long)
# Resolves to dim_score_long$text_type via the attach() above.
unique(text_type)
## [1] instructional_reading announcements_discussions
## [3] assignment_description instructional_video
## [5] slides syllabus
## [7] quiz classroom_management_talk_tmle
## [9] textbooks course_packs
## [11] course_management other_institutional_writing
## [13] lecture service_encounter
## [15] study_group lab
## [17] classroom_management_talk_t2kswal office_hours
## 18 Levels: textbooks other_institutional_writing ... classroom_management_talk_tmle
xtabs(~discipline+learning_environment)
## learning_environment
## discipline T2K-SWAL TMLE
## business 420 6732
## education 246 2796
## engineering 360 4224
## humanities 450 3510
## natural_science 444 3018
## other 180 0
## service_encounters 132 0
## social_science 540 4326
We are interested in examining the differences among Dimension, learning environment, and one of the three situational variables (text type). - The open question is whether we should center the binary variable of mode. Centering would make the main effects of learning_environment independent of mode. This may not affect the results much if we rely on emmeans(), since it calculates the estimated marginal means averaged over the other factors.
a includes all the parameters (1+dummy(Dimension, “PA1”)|filename) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)|discipline/mode) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)|text_type) -> The correlations between random intercepts and slopes are hard to interpret.
Uncorrelated random intercepts and slopes
(1+dummy(Dimension, “PA1”)||filename) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||discipline/mode) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||text_type) -> This would be the random-effect structure that is suitable for the current study.
b.1 excludes the intercepts for filename (0+dummy(Dimension, “PA1”)||filename) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||discipline/mode) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||text_type)
c.1 removes slope for filename (1|filename) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||discipline:mode) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||text_type)
c.2 removes slope for discipline:mode (1+dummy(Dimension, “PA1”)||filename) + (1|discipline:mode) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||text_type)
c.3 removes slope for text_type (1+dummy(Dimension, “PA1”)||filename) + (1+dummy(Dimension,“PA1”)+dummy(learning_environment,“tmle”)||discipline:mode) + (1|text_type)
c.12, c.23, c.123 is also possible
Here we construct two models: - The baseline is without fixed effects of text type - the text-type model is our model of interest.
# Baseline model: Dimension fixed effect only (no text_type fixed effects).
# Random effects: a per-text DIM1 slope within filename (no random intercept),
# plus intercept and correlated DIM1-by-lecture dummy slopes within
# discipline:mode.
# NOTE(review): this baseline keeps correlated slopes and discipline:mode,
# while text_type_1.b uses "||" and discipline/mode -- confirm the asymmetric
# random structures are the intended comparison.
baseline_1.a <- lmer(
  Dimensional_score ~ Dimension +
    (0 + dummy(Dimension, "DIM1") | filename) +
    (1 + dummy(Dimension, "DIM1") * dummy(text_type, "lecture") | discipline:mode),
  REML = FALSE, # ML (not REML) so the fit is comparable via anova()/AIC/BIC
  data = dim_score_long,
  control = lmerControl(optimizer = "bobyqa") # name the argument instead of relying on position
)
# Model of interest: Dimension-by-text_type interaction in the fixed effects.
# Random effects (all uncorrelated via "||"): per-text DIM1 slope within
# filename; intercept plus DIM1 * lecture-dummy slopes within the nested
# discipline/mode grouping; intercept plus DIM1 slope within
# filename:text_type.
text_type_1.b <- lmer(
  Dimensional_score ~ Dimension * text_type +
    (0 + dummy(Dimension, "DIM1") | filename) +
    (1 + dummy(Dimension, "DIM1") * dummy(text_type, "lecture") || discipline/mode) +
    (1 + dummy(Dimension, "DIM1") || filename:text_type),
  REML = FALSE, # ML (not REML) so the fit is comparable via anova()/AIC/BIC
  data = dim_score_long,
  control = lmerControl(optimizer = "bobyqa") # name the argument instead of relying on position
)
It turns out that the nloptwrap NLOPT_LN_BOBYQA optimizer did not complain at all.
optimizer_test <- allFit(text_type_1.b, parallel = "multicore")
## bobyqa : [OK]
## Nelder_Mead : [OK]
## nlminbwrap : [OK]
## optimx.L-BFGS-B : [OK]
## nloptwrap.NLOPT_LN_NELDERMEAD : [OK]
## nloptwrap.NLOPT_LN_BOBYQA : [OK]
lmer(Score ~ text_type + (1 + text_type | discipline/mode) + (1 | text_type))
lmer(Score ~ Dimension * text_type + (0 + Dimension | filename) + (1 + Dimension | text_type) + (1 + Dimension * text_type | discipline/mode))
anova(baseline_1.a, text_type_1.b)
The fuller model improved the fit to a large extent.
# Compare the two models by BIC (lower is better).
rbind(BIC(baseline_1.a), BIC(text_type_1.b))
## [,1]
## [1,] 163334.9
## [2,] 159796.7
# A positive difference means the text_type model is preferred by BIC.
BIC(baseline_1.a) - BIC(text_type_1.b)
## [1] 3538.212
The AIC comparison shows the same pattern of results.
# Compare the two models by AIC (lower is better).
rbind(AIC(baseline_1.a), AIC(text_type_1.b))
## [,1]
## [1,] 163187.0
## [2,] 158810.6
# A positive difference means the text_type model is preferred by AIC.
AIC(baseline_1.a) - AIC(text_type_1.b)
## [1] 4376.396
The baseline model explains only a small amount of variance (R2m = .003); the fuller model, however, explains a good deal of the variance in the dimensional scores.
# Pseudo-R^2 for mixed models: R2m = variance explained by fixed effects
# only; R2c = variance explained by fixed + random effects together.
MuMIn::r.squaredGLMM(baseline_1.a)
## R2m R2c
## [1,] 0.002872314 0.5503772
MuMIn::r.squaredGLMM(text_type_1.b)
## R2m R2c
## [1,] 0.4104583 0.5839393
#text_type_1.a <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a)
# #anova(baseline_1.a, text_type_1.a)
#
#
# text_type_1.a2 <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# # This is identical with our model
# text_type_1.a_mv <- lmer(Dimensional_score ~ 0 + Dimension:text_type + text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# # did not converge
# text_type_1.a_mv2 <- lmer(Dimensional_score ~ 0 + Dimension:text_type +
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
#
#
# # having the main effect did not converge
# text_type_1.a_mv3 <- lmer(Dimensional_score ~ 0 + Dimension:text_type + Dimension +
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# # turned out that we do not need intercepts for filename
# text_type_1.a_mv4 <- lmer(Dimensional_score ~ 0+Dimension*text_type +
# (1|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# # Then did not convverge
# text_type_1.a_mv5 <- lmer(Dimensional_score ~ 0+Dimension*text_type +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# text_type_1.a_no_cor <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# text_type_1.a_no_texttype_random <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#
# text_type_1.a_no_cor <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")+dummy(text_type,"lecture")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a_no_cor)
#
# ##
# text_type_1.a4 <- lmer(Dimensional_score ~ Dimension * text_type +
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||discipline/mode) +
# (1+dummy(Dimension,"DIM1")||text_type)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a4)
#
# text_type_1.a5 <- lmer(Dimensional_score ~ Dimension * text_type +
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||filename:discipline) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||filename:mode) +
# (1+dummy(Dimension,"DIM1")||filename:text_type)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a5)
#
# text_type_1.a6 <- lmer(Dimensional_score ~ Dimension * text_type +
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||filename:discipline) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||filename:mode) +
# (1+dummy(Dimension,"DIM1")||filename:text_type)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a6)
#
# text_type_1.a7 <- lmer(Dimensional_score ~ Dimension * text_type +
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||discipline) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"lecture")||mode) +
# (1+dummy(Dimension,"DIM1")||text_type)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a7)
#
#
#
# text_type_1.a2 <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"instructional_video")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer="bobyqa"))
#
# #summary(text_type_1.a2)
# #contrasts(dim_score_long$text_type)
#
# # This converged, although having singular fit
# text_type_1.a1 <- lmer(Dimensional_score ~ Dimension * text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"instructional_reading")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
# #summary(text_type_1.a1)
#
# #perfect collinear
# text_type_1.a2 <- lmer(Dimensional_score ~ Dimension * learning_environment *text_type+
# (0+dummy(Dimension, "DIM1")|filename) +
# (1+dummy(Dimension,"DIM1")*dummy(text_type,"instructional_reading")|discipline:mode)
# , REML = F,data = dim_score_long,
# lmerControl(optimizer ='nloptwrap',
# optCtrl=list(method='NLOPT_LN_BOBYQA'))
#summary(text_type_1.a2)
summary(text_type_1.b)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: Dimensional_score ~ Dimension * text_type + (0 + dummy(Dimension,
## "DIM1") | filename) + (1 + dummy(Dimension, "DIM1") * dummy(text_type,
## "lecture") || discipline/mode) + (1 + dummy(Dimension, "DIM1") ||
## filename:text_type)
## Data: dim_score_long
## Control: lmerControl(optimizer = "bobyqa")
##
## AIC BIC logLik deviance df.resid
## 158810.6 159796.7 -79285.3 158570.6 27258
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -5.8880 -0.4834 -0.0734 0.4087 8.7108
##
## Random effects:
## Groups Name
## filename.text_type dummy(Dimension, "DIM1")
## filename.text_type.1 (Intercept)
## filename dummy(Dimension, "DIM1")
## mode.discipline dummy(Dimension, "DIM1"):dummy(text_type, "lecture")
## mode.discipline.1 dummy(text_type, "lecture")
## mode.discipline.2 dummy(Dimension, "DIM1")
## mode.discipline.3 (Intercept)
## discipline dummy(Dimension, "DIM1"):dummy(text_type, "lecture")
## discipline.1 dummy(text_type, "lecture")
## discipline.2 dummy(Dimension, "DIM1")
## discipline.3 (Intercept)
## Residual
## Variance Std.Dev.
## 3.405e+01 5.836e+00
## 4.887e-13 6.991e-07
## 1.316e-06 1.147e-03
## 2.964e-01 5.445e-01
## 1.185e-13 3.443e-07
## 2.751e+00 1.658e+00
## 1.216e-02 1.103e-01
## 3.752e+00 1.937e+00
## 0.000e+00 0.000e+00
## 8.228e-01 9.071e-01
## 2.750e-01 5.244e-01
## 1.579e+01 3.974e+00
## Number of obs: 27378, groups:
## filename:text_type, 4563; filename, 4563; mode:discipline, 14; discipline, 8
##
## Fixed effects:
## Estimate Std. Error
## (Intercept) -6.55600 1.09191
## DimensionDIM2 13.10669 1.15389
## DimensionDIM3 0.79814 1.15389
## DimensionDIM4 5.99647 1.15389
## DimensionDIM5 7.09269 1.15389
## DimensionDIM6 6.61025 1.15389
## text_typeother_institutional_writing -5.72948 1.97302
## text_typecourse_packs -0.62854 1.55809
## text_typecourse_management -1.05863 1.72335
## text_typesyllabus -8.69029 0.92937
## text_typeslides -5.44901 0.97844
## text_typequiz -2.68725 0.89219
## text_typeinstructional_reading -1.49147 0.81271
## text_typeassignment_description -2.59643 0.85172
## text_typeannouncements_discussions -1.06433 0.85437
## text_typestudy_group 25.85581 1.86586
## text_typeservice_encounter 25.02460 2.70527
## text_typeoffice_hours 25.42457 2.45657
## text_typelecture 19.22499 1.56828
## text_typelab 24.35565 2.11143
## text_typeclassroom_management_talk_t2kswal 19.96889 1.67267
## text_typeinstructional_video 13.35260 1.22934
## text_typeclassroom_management_talk_tmle 10.53853 1.84957
## DimensionDIM2:text_typeother_institutional_writing 6.19303 2.10989
## DimensionDIM3:text_typeother_institutional_writing 6.18621 2.10989
## DimensionDIM4:text_typeother_institutional_writing 3.55917 2.10989
## DimensionDIM5:text_typeother_institutional_writing 5.69188 2.10989
## DimensionDIM6:text_typeother_institutional_writing 5.20468 2.10989
## DimensionDIM2:text_typecourse_packs 0.36516 1.78730
## DimensionDIM3:text_typecourse_packs 1.95820 1.78730
## DimensionDIM4:text_typecourse_packs 0.28261 1.78730
## DimensionDIM5:text_typecourse_packs 1.54246 1.78730
## DimensionDIM6:text_typecourse_packs 1.11796 1.78730
## DimensionDIM2:text_typecourse_management -2.58172 1.97599
## DimensionDIM3:text_typecourse_management 6.24103 1.97599
## DimensionDIM4:text_typecourse_management 0.48046 1.97599
## DimensionDIM5:text_typecourse_management -0.21692 1.97599
## DimensionDIM6:text_typecourse_management 0.38166 1.97599
## DimensionDIM2:text_typesyllabus 4.98330 1.06491
## DimensionDIM3:text_typesyllabus 9.00544 1.06491
## DimensionDIM4:text_typesyllabus 6.67804 1.06491
## DimensionDIM5:text_typesyllabus 7.13630 1.06491
## DimensionDIM6:text_typesyllabus 6.71380 1.06491
## DimensionDIM2:text_typeslides 1.08539 1.11997
## DimensionDIM3:text_typeslides 4.04404 1.11997
## DimensionDIM4:text_typeslides 4.22298 1.11997
## DimensionDIM5:text_typeslides 4.47977 1.11997
## DimensionDIM6:text_typeslides 3.84932 1.11997
## DimensionDIM2:text_typequiz -3.59774 1.02110
## DimensionDIM3:text_typequiz 4.99871 1.02110
## DimensionDIM4:text_typequiz 1.71136 1.02110
## DimensionDIM5:text_typequiz 1.38591 1.02110
## DimensionDIM6:text_typequiz 1.97982 1.02110
## DimensionDIM2:text_typeinstructional_reading -1.57257 0.93147
## DimensionDIM3:text_typeinstructional_reading 4.92102 0.93147
## DimensionDIM4:text_typeinstructional_reading 1.09039 0.93147
## DimensionDIM5:text_typeinstructional_reading 0.78198 0.93147
## DimensionDIM6:text_typeinstructional_reading 1.02422 0.93147
## DimensionDIM2:text_typeassignment_description -3.92407 0.97569
## DimensionDIM3:text_typeassignment_description 7.40037 0.97569
## DimensionDIM4:text_typeassignment_description 2.00025 0.97569
## DimensionDIM5:text_typeassignment_description 1.64492 0.97569
## DimensionDIM6:text_typeassignment_description 1.53879 0.97569
## DimensionDIM2:text_typeannouncements_discussions -4.36590 0.97984
## DimensionDIM3:text_typeannouncements_discussions 9.11891 0.97984
## DimensionDIM4:text_typeannouncements_discussions 0.70015 0.97984
## DimensionDIM5:text_typeannouncements_discussions 0.05485 0.97984
## DimensionDIM6:text_typeannouncements_discussions 0.14952 0.97984
## DimensionDIM2:text_typestudy_group -40.22042 2.07148
## DimensionDIM3:text_typestudy_group -18.68002 2.07148
## DimensionDIM4:text_typestudy_group -25.24292 2.07148
## DimensionDIM5:text_typestudy_group -24.96415 2.07148
## DimensionDIM6:text_typestudy_group -27.65929 2.07148
## DimensionDIM2:text_typeservice_encounter -42.63832 2.80862
## DimensionDIM3:text_typeservice_encounter -14.89656 2.80862
## DimensionDIM4:text_typeservice_encounter -25.89939 2.80862
## DimensionDIM5:text_typeservice_encounter -24.87655 2.80862
## DimensionDIM6:text_typeservice_encounter -27.64170 2.80862
## DimensionDIM2:text_typeoffice_hours -39.35865 2.76565
## DimensionDIM3:text_typeoffice_hours -15.42282 2.76565
## DimensionDIM4:text_typeoffice_hours -24.01976 2.76565
## DimensionDIM5:text_typeoffice_hours -25.07106 2.76565
## DimensionDIM6:text_typeoffice_hours -26.15961 2.76565
## DimensionDIM2:text_typelecture -30.37445 1.65125
## DimensionDIM3:text_typelecture -11.59768 1.65125
## DimensionDIM4:text_typelecture -18.29199 1.65125
## DimensionDIM5:text_typelecture -18.95182 1.65125
## DimensionDIM6:text_typelecture -19.10567 1.65125
## DimensionDIM2:text_typelab -38.20880 2.35938
## DimensionDIM3:text_typelab -15.82061 2.35938
## DimensionDIM4:text_typelab -23.10842 2.35938
## DimensionDIM5:text_typelab -23.69286 2.35938
## DimensionDIM6:text_typelab -25.10842 2.35938
## DimensionDIM2:text_typeclassroom_management_talk_t2kswal -33.71256 1.84154
## DimensionDIM3:text_typeclassroom_management_talk_t2kswal -8.94001 1.84154
## DimensionDIM4:text_typeclassroom_management_talk_t2kswal -19.74920 1.84154
## DimensionDIM5:text_typeclassroom_management_talk_t2kswal -20.71336 1.84154
## DimensionDIM6:text_typeclassroom_management_talk_t2kswal -20.84339 1.84154
## DimensionDIM2:text_typeinstructional_video -21.06560 1.30247
## DimensionDIM3:text_typeinstructional_video -5.77591 1.30247
## DimensionDIM4:text_typeinstructional_video -11.52787 1.30247
## DimensionDIM5:text_typeinstructional_video -13.54584 1.30247
## DimensionDIM6:text_typeinstructional_video -12.52403 1.30247
## DimensionDIM2:text_typeclassroom_management_talk_tmle -16.31917 2.05109
## DimensionDIM3:text_typeclassroom_management_talk_tmle -0.06053 2.05109
## DimensionDIM4:text_typeclassroom_management_talk_tmle -11.18583 2.05109
## DimensionDIM5:text_typeclassroom_management_talk_tmle -11.55730 2.05109
## DimensionDIM6:text_typeclassroom_management_talk_tmle -10.33606 2.05109
## df t value
## (Intercept) 55.69584 -6.004
## DimensionDIM2 69.76215 11.359
## DimensionDIM3 69.76215 0.692
## DimensionDIM4 69.76215 5.197
## DimensionDIM5 69.76215 6.147
## DimensionDIM6 69.76215 5.729
## text_typeother_institutional_writing 114.91343 -2.904
## text_typecourse_packs 4544.80884 -0.403
## text_typecourse_management 4546.20317 -0.614
## text_typesyllabus 4546.64719 -9.351
## text_typeslides 4550.95378 -5.569
## text_typequiz 4550.43870 -3.012
## text_typeinstructional_reading 4546.26360 -1.835
## text_typeassignment_description 4547.50625 -3.048
## text_typeannouncements_discussions 4543.47857 -1.246
## text_typestudy_group 91.08848 13.857
## text_typeservice_encounter 37.38268 9.250
## text_typeoffice_hours 264.71542 10.350
## text_typelecture 19.91133 12.259
## text_typelab 147.55400 11.535
## text_typeclassroom_management_talk_t2kswal 59.13906 11.938
## text_typeinstructional_video 17.33894 10.862
## text_typeclassroom_management_talk_tmle 87.92101 5.698
## DimensionDIM2:text_typeother_institutional_writing 137.42150 2.935
## DimensionDIM3:text_typeother_institutional_writing 137.42150 2.932
## DimensionDIM4:text_typeother_institutional_writing 137.42149 1.687
## DimensionDIM5:text_typeother_institutional_writing 137.42149 2.698
## DimensionDIM6:text_typeother_institutional_writing 137.42149 2.467
## DimensionDIM2:text_typecourse_packs 7716.02436 0.204
## DimensionDIM3:text_typecourse_packs 7716.02539 1.096
## DimensionDIM4:text_typecourse_packs 7716.02433 0.158
## DimensionDIM5:text_typecourse_packs 7716.02479 0.863
## DimensionDIM6:text_typecourse_packs 7716.02422 0.626
## DimensionDIM2:text_typecourse_management 7705.98857 -1.307
## DimensionDIM3:text_typecourse_management 7705.98927 3.158
## DimensionDIM4:text_typecourse_management 7705.98850 0.243
## DimensionDIM5:text_typecourse_management 7705.98897 -0.110
## DimensionDIM6:text_typecourse_management 7705.98854 0.193
## DimensionDIM2:text_typesyllabus 7688.42257 4.680
## DimensionDIM3:text_typesyllabus 7688.43061 8.456
## DimensionDIM4:text_typesyllabus 7688.42205 6.271
## DimensionDIM5:text_typesyllabus 7688.42634 6.701
## DimensionDIM6:text_typesyllabus 7688.42203 6.305
## DimensionDIM2:text_typeslides 7666.04450 0.969
## DimensionDIM3:text_typeslides 7666.05196 3.611
## DimensionDIM4:text_typeslides 7666.04449 3.771
## DimensionDIM5:text_typeslides 7666.04808 4.000
## DimensionDIM6:text_typeslides 7666.04461 3.437
## DimensionDIM2:text_typequiz 7660.83390 -3.523
## DimensionDIM3:text_typequiz 7660.84394 4.895
## DimensionDIM4:text_typequiz 7660.83375 1.676
## DimensionDIM5:text_typequiz 7660.83831 1.357
## DimensionDIM6:text_typequiz 7660.83361 1.939
## DimensionDIM2:text_typeinstructional_reading 7694.98177 -1.688
## DimensionDIM3:text_typeinstructional_reading 7694.99528 5.283
## DimensionDIM4:text_typeinstructional_reading 7694.98106 1.171
## DimensionDIM5:text_typeinstructional_reading 7694.98801 0.840
## DimensionDIM6:text_typeinstructional_reading 7694.98072 1.100
## DimensionDIM2:text_typeassignment_description 7682.62150 -4.022
## DimensionDIM3:text_typeassignment_description 7682.63345 7.585
## DimensionDIM4:text_typeassignment_description 7682.62042 2.050
## DimensionDIM5:text_typeassignment_description 7682.62649 1.686
## DimensionDIM6:text_typeassignment_description 7682.62058 1.577
## DimensionDIM2:text_typeannouncements_discussions 7707.95033 -4.456
## DimensionDIM3:text_typeannouncements_discussions 7707.96180 9.306
## DimensionDIM4:text_typeannouncements_discussions 7707.94976 0.715
## DimensionDIM5:text_typeannouncements_discussions 7707.95512 0.056
## DimensionDIM6:text_typeannouncements_discussions 7707.94968 0.153
## DimensionDIM2:text_typestudy_group 138.35870 -19.416
## DimensionDIM3:text_typestudy_group 138.35870 -9.018
## DimensionDIM4:text_typestudy_group 138.35870 -12.186
## DimensionDIM5:text_typestudy_group 138.35870 -12.051
## DimensionDIM6:text_typestudy_group 138.35870 -13.352
## DimensionDIM2:text_typeservice_encounter 43.91609 -15.181
## DimensionDIM3:text_typeservice_encounter 43.91609 -5.304
## DimensionDIM4:text_typeservice_encounter 43.91609 -9.221
## DimensionDIM5:text_typeservice_encounter 43.91609 -8.857
## DimensionDIM6:text_typeservice_encounter 43.91609 -9.842
## DimensionDIM2:text_typeoffice_hours 424.88956 -14.231
## DimensionDIM3:text_typeoffice_hours 424.88957 -5.577
## DimensionDIM4:text_typeoffice_hours 424.88957 -8.685
## DimensionDIM5:text_typeoffice_hours 424.88957 -9.065
## DimensionDIM6:text_typeoffice_hours 424.88956 -9.459
## DimensionDIM2:text_typelecture 24.47142 -18.395
## DimensionDIM3:text_typelecture 24.47142 -7.024
## DimensionDIM4:text_typelecture 24.47142 -11.078
## DimensionDIM5:text_typelecture 24.47142 -11.477
## DimensionDIM6:text_typelecture 24.47142 -11.570
## DimensionDIM2:text_typelab 229.94396 -16.194
## DimensionDIM3:text_typelab 229.94396 -6.705
## DimensionDIM4:text_typelab 229.94396 -9.794
## DimensionDIM5:text_typelab 229.94396 -10.042
## DimensionDIM6:text_typelab 229.94396 -10.642
## DimensionDIM2:text_typeclassroom_management_talk_t2kswal 86.88635 -18.307
## DimensionDIM3:text_typeclassroom_management_talk_t2kswal 86.88635 -4.855
## DimensionDIM4:text_typeclassroom_management_talk_t2kswal 86.88635 -10.724
## DimensionDIM5:text_typeclassroom_management_talk_t2kswal 86.88635 -11.248
## DimensionDIM6:text_typeclassroom_management_talk_t2kswal 86.88635 -11.318
## DimensionDIM2:text_typeinstructional_video 21.84878 -16.174
## DimensionDIM3:text_typeinstructional_video 21.84878 -4.435
## DimensionDIM4:text_typeinstructional_video 21.84878 -8.851
## DimensionDIM5:text_typeinstructional_video 21.84878 -10.400
## DimensionDIM6:text_typeinstructional_video 21.84878 -9.616
## DimensionDIM2:text_typeclassroom_management_talk_tmle 132.94415 -7.956
## DimensionDIM3:text_typeclassroom_management_talk_tmle 132.94415 -0.030
## DimensionDIM4:text_typeclassroom_management_talk_tmle 132.94415 -5.454
## DimensionDIM5:text_typeclassroom_management_talk_tmle 132.94415 -5.635
## DimensionDIM6:text_typeclassroom_management_talk_tmle 132.94415 -5.039
## Pr(>|t|)
## (Intercept) 1.52e-07 ***
## DimensionDIM2 < 2e-16 ***
## DimensionDIM3 0.491425
## DimensionDIM4 1.92e-06 ***
## DimensionDIM5 4.35e-08 ***
## DimensionDIM6 2.36e-07 ***
## text_typeother_institutional_writing 0.004420 **
## text_typecourse_packs 0.686668
## text_typecourse_management 0.539055
## text_typesyllabus < 2e-16 ***
## text_typeslides 2.71e-08 ***
## text_typequiz 0.002610 **
## text_typeinstructional_reading 0.066543 .
## text_typeassignment_description 0.002313 **
## text_typeannouncements_discussions 0.212920
## text_typestudy_group < 2e-16 ***
## text_typeservice_encounter 3.33e-11 ***
## text_typeoffice_hours < 2e-16 ***
## text_typelecture 9.87e-11 ***
## text_typelab < 2e-16 ***
## text_typeclassroom_management_talk_t2kswal < 2e-16 ***
## text_typeinstructional_video 3.69e-09 ***
## text_typeclassroom_management_talk_tmle 1.59e-07 ***
## DimensionDIM2:text_typeother_institutional_writing 0.003908 **
## DimensionDIM3:text_typeother_institutional_writing 0.003946 **
## DimensionDIM4:text_typeother_institutional_writing 0.093892 .
## DimensionDIM5:text_typeother_institutional_writing 0.007857 **
## DimensionDIM6:text_typeother_institutional_writing 0.014862 *
## DimensionDIM2:text_typecourse_packs 0.838118
## DimensionDIM3:text_typecourse_packs 0.273278
## DimensionDIM4:text_typecourse_packs 0.874366
## DimensionDIM5:text_typecourse_packs 0.388157
## DimensionDIM6:text_typecourse_packs 0.531658
## DimensionDIM2:text_typecourse_management 0.191407
## DimensionDIM3:text_typecourse_management 0.001592 **
## DimensionDIM4:text_typecourse_management 0.807898
## DimensionDIM5:text_typecourse_management 0.912589
## DimensionDIM6:text_typecourse_management 0.846846
## DimensionDIM2:text_typesyllabus 2.92e-06 ***
## DimensionDIM3:text_typesyllabus < 2e-16 ***
## DimensionDIM4:text_typesyllabus 3.78e-10 ***
## DimensionDIM5:text_typesyllabus 2.21e-11 ***
## DimensionDIM6:text_typesyllabus 3.05e-10 ***
## DimensionDIM2:text_typeslides 0.332516
## DimensionDIM3:text_typeslides 0.000307 ***
## DimensionDIM4:text_typeslides 0.000164 ***
## DimensionDIM5:text_typeslides 6.40e-05 ***
## DimensionDIM6:text_typeslides 0.000591 ***
## DimensionDIM2:text_typequiz 0.000429 ***
## DimensionDIM3:text_typequiz 1.00e-06 ***
## DimensionDIM4:text_typequiz 0.093780 .
## DimensionDIM5:text_typequiz 0.174735
## DimensionDIM6:text_typequiz 0.052549 .
## DimensionDIM2:text_typeinstructional_reading 0.091402 .
## DimensionDIM3:text_typeinstructional_reading 1.31e-07 ***
## DimensionDIM4:text_typeinstructional_reading 0.241792
## DimensionDIM5:text_typeinstructional_reading 0.401213
## DimensionDIM6:text_typeinstructional_reading 0.271555
## DimensionDIM2:text_typeassignment_description 5.83e-05 ***
## DimensionDIM3:text_typeassignment_description 3.72e-14 ***
## DimensionDIM4:text_typeassignment_description 0.040390 *
## DimensionDIM5:text_typeassignment_description 0.091855 .
## DimensionDIM6:text_typeassignment_description 0.114808
## DimensionDIM2:text_typeannouncements_discussions 8.48e-06 ***
## DimensionDIM3:text_typeannouncements_discussions < 2e-16 ***
## DimensionDIM4:text_typeannouncements_discussions 0.474904
## DimensionDIM5:text_typeannouncements_discussions 0.955360
## DimensionDIM6:text_typeannouncements_discussions 0.878719
## DimensionDIM2:text_typestudy_group < 2e-16 ***
## DimensionDIM3:text_typestudy_group 1.42e-15 ***
## DimensionDIM4:text_typestudy_group < 2e-16 ***
## DimensionDIM5:text_typestudy_group < 2e-16 ***
## DimensionDIM6:text_typestudy_group < 2e-16 ***
## DimensionDIM2:text_typeservice_encounter < 2e-16 ***
## DimensionDIM3:text_typeservice_encounter 3.53e-06 ***
## DimensionDIM4:text_typeservice_encounter 7.80e-12 ***
## DimensionDIM5:text_typeservice_encounter 2.50e-11 ***
## DimensionDIM6:text_typeservice_encounter 1.11e-12 ***
## DimensionDIM2:text_typeoffice_hours < 2e-16 ***
## DimensionDIM3:text_typeoffice_hours 4.37e-08 ***
## DimensionDIM4:text_typeoffice_hours < 2e-16 ***
## DimensionDIM5:text_typeoffice_hours < 2e-16 ***
## DimensionDIM6:text_typeoffice_hours < 2e-16 ***
## DimensionDIM2:text_typelecture 7.73e-16 ***
## DimensionDIM3:text_typelecture 2.61e-07 ***
## DimensionDIM4:text_typelecture 5.07e-11 ***
## DimensionDIM5:text_typelecture 2.44e-11 ***
## DimensionDIM6:text_typelecture 2.06e-11 ***
## DimensionDIM2:text_typelab < 2e-16 ***
## DimensionDIM3:text_typelab 1.54e-10 ***
## DimensionDIM4:text_typelab < 2e-16 ***
## DimensionDIM5:text_typelab < 2e-16 ***
## DimensionDIM6:text_typelab < 2e-16 ***
## DimensionDIM2:text_typeclassroom_management_talk_t2kswal < 2e-16 ***
## DimensionDIM3:text_typeclassroom_management_talk_t2kswal 5.27e-06 ***
## DimensionDIM4:text_typeclassroom_management_talk_t2kswal < 2e-16 ***
## DimensionDIM5:text_typeclassroom_management_talk_t2kswal < 2e-16 ***
## DimensionDIM6:text_typeclassroom_management_talk_t2kswal < 2e-16 ***
## DimensionDIM2:text_typeinstructional_video 1.21e-13 ***
## DimensionDIM3:text_typeinstructional_video 0.000212 ***
## DimensionDIM4:text_typeinstructional_video 1.12e-08 ***
## DimensionDIM5:text_typeinstructional_video 6.34e-10 ***
## DimensionDIM6:text_typeinstructional_video 2.62e-09 ***
## DimensionDIM2:text_typeclassroom_management_talk_tmle 6.85e-13 ***
## DimensionDIM3:text_typeclassroom_management_talk_tmle 0.976501
## DimensionDIM4:text_typeclassroom_management_talk_tmle 2.33e-07 ***
## DimensionDIM5:text_typeclassroom_management_talk_tmle 1.00e-07 ***
## DimensionDIM6:text_typeclassroom_management_talk_tmle 1.50e-06 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see ?isSingular
#originally we used comparison and manually reversing the contrast, but here we did so using the emmeans function
#mm_texttype <- emmeans(text_type_1.a, pairwise ~ rev(text_type) | Dimension, adjust = "tukey")
#mm_texttype
# Estimated marginal means of text_type within each Dimension, with all
# reversed pairwise contrasts (revpairwise + rev() flips the comparison
# direction) and Tukey-adjusted p-values.
mm_texttype.rev3 <- emmeans(text_type_1.b, revpairwise ~ rev(text_type) | Dimension, adjust = "tukey")
#test(mm_texttype.rev3)
# Print the EMMs (and contrasts) as a plain data frame.
as.data.frame(mm_texttype.rev3)
#text_type_eff <- emmeans::eff_size(mm_texttype, sigma = sigma(text_type_1.a), edf = Inf)
#text_type_eff
# Cohen's-d-style effect sizes for the pairwise contrasts, standardized by
# the model's residual SD; edf = Inf treats sigma as known (z rather than t).
text_type_eff.rev3 <- emmeans::eff_size(mm_texttype.rev3, sigma = sigma(text_type_1.b), edf = Inf)
as.data.frame(text_type_eff.rev3)
#write.csv(as.data.frame(text_type_eff.rev3), 'effectsize_newrandom.csv')
## This is the modified version of comparison plot
## Comparison-arrow plot of the EMMs (arrows, no CI bars), one panel per
## Dimension, with the x-axis restricted to [-20, 20].
# NOTE(review): fixed limits in scale_x_continuous() override
# scales = "free_x", so all panels end up with the same axis -- confirm the
# clipping is intended.
plot(mm_texttype.rev3, comparisons = TRUE, CIs = FALSE) +
  facet_wrap(~Dimension, scales = "free_x", ncol = 1) + # "scales", not partial-matched "scale"
  scale_x_continuous(limits = c(-20, 20)) +
  theme_bw() +
  theme(axis.text.x = element_text(face = "bold", size = 10))
## Same comparison plot without the x-axis clipping, then saved to disk.
plot(mm_texttype.rev3, comparisons = TRUE, CIs = FALSE) +
  facet_wrap(~Dimension, scales = "free_x", ncol = 1) + # "scales", not partial-matched "scale"
  theme_bw() +
  theme(axis.text.x = element_text(face = "bold", size = 10))
# ggsave() writes the most recently displayed ggplot.
ggsave('comparison.jpeg', width = 6.5, height = 15.5, dpi = 600)